# 5.8-5.9
users = ['user1', 'user2', 'user3', 'user4', 'admin']
# users = []
if users:
    for user in users:
        if user == 'admin':
            print(f"Hello, {user}, would you like to see a status report?")
        else:
            print(f"Hello, {user}, thank you for logging in again")
else:
    print("We need to find some users!")

# 5.10
current_users = ['name1', 'name2', 'name3', 'name4', 'name5']
new_users = ['naMe5', 'name6', 'name7', 'name8', 'name9', 'Name1']
for new_user in new_users:
    if new_user.lower() in current_users:
        print(f"Sorry, this name - "
              f"{new_user.title()} is used. Try again with another name.")
    else:
        print(f"You can use this name - {new_user.title()}.")

# 5.11
numbers = [1, 2, 3, 4, 5, 6, 7, 8, 9]
for number in numbers:
    if number == 1:
        print("1st")
    elif number == 2:
        print("2nd")
    elif number == 3:
        print("3rd")
    else:
        print(f"{number}th")
from coarsegrainparams import *
from inva_fcl_stab import *
from Eq import *
from Dynamics import *
from sympy import Matrix, sqrt
def construct_param_dict(params, K_RC, K_CP, m_P):
    """
    Construct all the parameters from their relationships with body size and temperature, using the normalizing constants and scaling exponent w
    """
    ### scaling constants
    w = params['w']
    pd = params['pd']  # in 3D and 0.21 in 2D
    pv = params['pv']
    Er = params['Er']
    Ek = params['Ek']
    ER = params['ER']
    EC = params['EC']
    EP = params['EP']
    Eq1 = params['Eq1']
    Eq2 = params['Eq2']
    # capture success function
    a = params['a']
    b = params['b']
    c = params['c']
    formC = params['formC']
    formPC = params['formPC']
    formPR = params['formPR']
    ### variables
    TR = params['TR']
    TC = params['TC']
    TP = params['TP']
    D_R = params['D_R']
    D_C = params['D_C']
    K_RP = K_RC * K_CP
    fmC = params['fmC']
    thermyR = params['thermyR']
    thermyC = params['thermyC']
    thermyP = params['thermyP']
    fmPR = params['fmPR']
    fmPC = params['fmPC']
    m_C = K_CP * m_P
    m_R = K_RP * m_P
    ### normalization constants and Boltzmann constant
    r0 = params['r0']
    k0 = params['k0']  # will depend on the productivity of the habitat
    a01 = a02 = params['a012']  # will depend on the dimension of the habitat
    a03 = params['a03']
    d0 = params['d0']
    q10 = params['q10']
    q20 = params['q20']
    v0R = params['v0R']
    v0C = params['v0C']
    v0P = params['v0P']
    k = b_k
    hC0 = params['hC0']
    hP0 = params['hP0']
    # intrapopulation parameters
    q1 = set_q1(q10, m_C, w, Eq1, TR, k)
    q2 = set_q2(q20, m_P, w, Eq2, TC, k)
    K = set_K(k0, m_R, w, Ek, TR, k)
    r = set_r(r0, m_R, w, Er, TR, k)
    # interpopulation parameters
    a1 = set_alfa(m_C, a01, K_RC, pv, pd, TR, TC, ER, EC, D_R, v0R, v0C, g,
                  alfa, fmC, thermyR, thermyC, k, a, b, c, formC)
    a2 = set_alfa(m_P, a02, K_RP, pv, pd, TR, TP, ER, EP, D_R, v0R, v0P, g,
                  alfa, fmPR, thermyR, thermyP, k, a, b, c, formPR)
    a3 = set_alfa(m_P, a03, K_CP, pv, pd, TC, TP, EC, EP, D_C, v0C, v0P, g,
                  alfa, fmPC, thermyC, thermyP, k, a, b, c, formPC)
    t_hp = set_th(hP0, m_P, w, EP, k, TP)
    t_hc = set_th(hC0, m_C, w, EC, k, TC)
    param_dict = {'q1': q1, 'q2': q2, 'K': K, 'r': r, 'a1': a1, 'a2': a2,
                  'a3': a3, 't_hp': t_hp, 't_hc': t_hc}
    return param_dict
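# A minimal sketch of the Boltzmann-Arrhenius mass-temperature scaling that
# helpers such as set_r/set_K/set_q1/set_th presumably implement; this is an
# assumed form for illustration, not taken from coarsegrainparams.
def _arrhenius_sketch(x0, m, w, E, T, k=8.617e-05):
    """x0 * m**w * exp(-E / (k * T)), with k in eV/K and T in Kelvin (assumed units)."""
    from math import exp
    return x0 * m ** w * exp(-E / (k * T))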
def construct_equilibrium(params, par_dict, K_RC, K_CP, m_P):
    """
    Construct all the functions related to the computation of equilibrium values in the model, in any subsystem
    """
    # intrapopulation parameters
    q1 = par_dict['q1']
    q2 = par_dict['q2']
    q1_0 = params['q10']
    q20 = params['q20']
    hC0 = params['hC0']
    hP0 = params['hP0']
    K = par_dict['K']
    r = par_dict['r']
    m_C = K_CP * m_P
    # interpopulation parameters
    a1 = par_dict['a1']
    a2 = par_dict['a2']
    a3 = par_dict['a3']
    t_hc = par_dict['t_hc']
    t_hp = par_dict['t_hp']
    e1 = params['e1']
    e2 = params['e2']
    e3 = params['e3']
    # Equilibrium values
    ## Sc2
    ### L-V
    R_eq_s2, C_eq_s2 = set_R_C_eq_sLV(r, K, q1, a1, e1)
    ### R-M
    R_eq_s2RM, C_eq_s2RM = set_R_C_eq_sRM(r, K, q1, q1_0, a1, e1, hC0)
    ## Sc3
    ### L-V
    R_eq_s3, P_eq_s3 = set_R_C_eq_sLV(r, K, q2, a2, e2)
    ### R-M
    R_eq_s3RM, P_eq_s3RM = set_R_C_eq_sRM(r, K, q2, q20, a2, e2, hP0)
    ### full system (still needs correcting before use; the current focus is the invasibility analysis)
    R_eq = set_R_eq(K, q1, q2, r, a1, a2, a3, e1, e2, e3)
    C_eq = set_C_eq(K, q1, q2, r, a1, a2, a3, e1, e2, e3)
    P_eq = set_P_eq(K, q1, q2, r, a1, a2, a3, e1, e2, e3)
    D = setD(K, a1, a2, a3, e1, e2, e3, r)
    DBound = setDBound(K, a1, a2, a3, e1, e2, e3, m_C, r)
    # Roots for R_eq
    R1 = setRoot1(K, q1, q2, r, a1, a2, a3, e1, e2, e3, t_hc, t_hp, m_P, m_C, q20, q1_0, hC0, hP0)
    Dis = setDis(K, q1, q2, r, a1, a2, a3, e1, e2, e3, t_hc, t_hp, m_P, m_C, q20, q1_0, hC0, hP0)
    bR = setb_R(K, q1, q2, r, a1, a2, a3, e1, e2, e3, t_hc, t_hp, m_P, m_C, q20, q1_0, hC0, hP0)
    denR = setden_R(K, q1, q2, r, a1, a2, a3, e1, e2, e3, t_hc, t_hp, m_P, m_C, q20, q1_0, hC0, hP0)
    R2 = (bR + sqrt(Dis)) / (2 * denR)
    R3 = (bR - sqrt(Dis)) / (2 * denR)
    eq_dict = {'R_eq_s2': R_eq_s2, 'C_eq_s2': C_eq_s2, 'R_eq_s3': R_eq_s3, 'P_eq_s3': P_eq_s3,
               'R_eq': R_eq, 'C_eq': C_eq, 'P_eq': P_eq,
               'R_eq_s2RM': R_eq_s2RM, 'C_eq_s2RM': C_eq_s2RM,
               'R_eq_s3RM': R_eq_s3RM, 'P_eq_s3RM': P_eq_s3RM,
               'R1': R1, 'Discriminant': Dis, 'R2': R2, 'R3': R3,
               'bR': bR, 'denR': denR, 'D': D, 'DBound': DBound}
    return eq_dict
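# Usage sketch: the eq_dict entries are symbolic, so numerical work would
# typically lambdify them. A self-contained toy with the same root shape as
# R2 above (the symbol names here are illustrative, not the model's):
from sympy import symbols, lambdify
_b, _Dis, _den = symbols('_b _Dis _den', positive=True)
_root_num = lambdify((_b, _Dis, _den), (_b + sqrt(_Dis)) / (2 * _den), 'math')
assert abs(_root_num(3.0, 9.0, 1.0) - 3.0) < 1e-12  # (3 + sqrt(9)) / 2 = 3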
def construct_inv_boundaries(params, par_dict, eq_dict, K_RC, K_CP, m_P):
    """
    Construct, in sympy format, all the functions related to the invasibility conditions in each of the explored scenarios
    """
    # intrapop params
    q1 = par_dict['q1']
    q2 = par_dict['q2']
    K = par_dict['K']
    m_C = K_CP * m_P
    q10 = params['q10']
    q20 = params['q20']
    hC0 = params['hC0']
    hP0 = params['hP0']
    # interpop params
    a1 = par_dict['a1']
    a2 = par_dict['a2']
    a3 = par_dict['a3']
    e1 = params['e1']
    e2 = params['e2']
    e3 = params['e3']
    t_hc = par_dict['t_hc']
    t_hp = par_dict['t_hp']
    # eq values
    # L-V
    R_eq_s2 = eq_dict['R_eq_s2']
    C_eq_s2 = eq_dict['C_eq_s2']
    P_eq_s3 = eq_dict['P_eq_s3']
    R_eq_s3 = eq_dict['R_eq_s3']
    # R-M
    R_eq_s2RM = eq_dict['R_eq_s2RM']
    C_eq_s2RM = eq_dict['C_eq_s2RM']
    R_eq_s3RM = eq_dict['R_eq_s3RM']
    P_eq_s3RM = eq_dict['P_eq_s3RM']
    ## Invasibility boundaries
    # L-V
    I_C_s2 = set_I_C_s2(e1, a1, K, q1)
    I_P_s3 = set_I_P_s3(e2, a2, K, q2)
    I_P_s4 = set_I_P_s4(e2, e3, a2, a3, q2, R_eq_s2, C_eq_s2)
    I_C_s5 = set_I_C_s5(e1, a1, a3, R_eq_s3, P_eq_s3, q1)
    # R-M
    I_C_s2RM = set_I_C_s2RM(e1, a1, K, q1, hC0, q10)
    I_P_s3RM = set_I_P_s3RM(e2, a2, K, q2, hP0, q20)
    I_P_s4RM = set_I_P_s4RM(e2, e3, a2, a3, q2, R_eq_s2RM, C_eq_s2RM, hP0, q20)
    I_C_s5RM = set_I_C_s5RM(e1, e2, a1, a3, m_C, R_eq_s3RM, P_eq_s3RM, q1, t_hc, q10, q20, hP0, hC0)
    inv_dict = {'I_C_s2': I_C_s2, 'I_P_s3': I_P_s3, 'I_P_s4': I_P_s4, 'I_C_s5': I_C_s5,
                'I_C_s2RM': I_C_s2RM, 'I_P_s3RM': I_P_s3RM, 'I_P_s4RM': I_P_s4RM, 'I_C_s5RM': I_C_s5RM}
    return inv_dict
def Trophic_position(params, par_dict, eq_dict):
    R_eq = eq_dict['R_eq']
    a2 = par_dict['a2']
    q2 = par_dict['q2']
    e2 = params['e2']
    # Trophic position in the coexistence domain
    MTP_C = set_MTP_C(R_eq, a2, q2, e2)
    return MTP_C
def Stability(params, par_dict, eq_dict, K_RC, K_CP, m_P):
    # intrapop params
    K = par_dict['K']
    r = par_dict['r']
    m_C = K_CP * m_P
    # interpop params
    a1 = par_dict['a1']
    a2 = par_dict['a2']
    a3 = par_dict['a3']
    e1 = params['e1']
    e2 = params['e2']
    e3 = params['e3']
    # equilibrium
    R_eq = eq_dict['R_eq']
    C_eq = eq_dict['C_eq']
    P_eq = eq_dict['P_eq']
    ## Stability
    D = set_D(K, a1, a2, a3, e1, e2, e3, r)
    d1 = set_d1(r, R_eq, K)
    d2 = set_d2(e1, e2, e3, a1, a2, a3, C_eq, R_eq, P_eq)
    d3 = set_d3(D, a3, C_eq, R_eq, P_eq, K)
    hd2 = set_hdet2(d1, d2, d3)
    return hd2
def Jacobian(dR, dC, dP, R, C, P):
    X = Matrix([dR, dC, dP])
    Y = Matrix([R, C, P])
    return X.jacobian(Y)
def Jacobian2(dX, dY, X, Y):
    A = Matrix([dX, dY])
    B = Matrix([X, Y])
    return A.jacobian(B)
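# Usage sketch for Jacobian2 on a toy Lotka-Volterra pair (the symbols and
# rate expressions below are illustrative only, not the model's):
from sympy import symbols as _symbols
_Rt, _Ct = _symbols('Rt Ct', positive=True)
_dRt = _Rt * (1 - _Rt) - _Rt * _Ct  # logistic prey growth minus predation
_dCt = _Rt * _Ct - _Ct / 10         # conversion minus mortality
_J_toy = Jacobian2(_dRt, _dCt, _Rt, _Ct)  # 2x2 matrix of partial derivatives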
def setJacobianDict(DynamicsDict, R, C, P):
    dRLV = DynamicsDict['dxLVa']
    dCLV = DynamicsDict['dyLVa']
    dPLV = DynamicsDict['dzLVa']
    dRRM = DynamicsDict['dRRM']
    dCRM = DynamicsDict['dCRM']
    dPRM = DynamicsDict['dPRM']
    dRLVP = DynamicsDict['dRLVP']
    dRLVC = DynamicsDict['dRLVC']
    dPLVP = DynamicsDict['dPLVP']
    dCLVC = DynamicsDict['dCLVC']
    JLV = Jacobian(dRLV, dCLV, dPLV, R, C, P)
    JRM = Jacobian(dRRM, dCRM, dPRM, R, C, P)
    JLVP = Jacobian2(dRLVP, dPLVP, R, P)
    JLVC = Jacobian2(dRLVC, dCLVC, R, C)
    return {'JLV': JLV, 'JRM': JRM, 'JLVP': JLVP, 'JLVC': JLVC}
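# Usage sketch: local stability would typically be read off the eigenvalues of
# one of these Jacobians evaluated at an equilibrium. A toy 2x2 illustration
# (the matrix is made up, not the model's):
_J_demo = Matrix([[-2, 1], [-1, -2]])
_eig_demo = _J_demo.eigenvals()  # {-2 - I: 1, -2 + I: 1}; Re < 0 on both => stable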
def ConstructDynamicalFunctions(params, par_dict, K_RC, K_CP, m_P, R, C, P):
    # intrapopulation parameters
    q1 = par_dict['q1']
    q2 = par_dict['q2']
    K = par_dict['K']
    r = par_dict['r']
    e1 = params['e1']
    e2 = params['e2']
    e3 = params['e3']
    m_C = K_CP * m_P
    q20 = params['q20']
    q10 = params['q10']
    # interpopulation parameters
    a1 = par_dict['a1']
    a2 = par_dict['a2']
    a3 = par_dict['a3']
    t_hp = par_dict['t_hp']
    t_hc = par_dict['t_hc']
    hC0 = params['hC0']
    hP0 = params['hP0']
    # Construct LV functions
    dRLV = set_dRLV(R, C, P, r, K, a1, a2)
    dPLV = set_dPLV(R, C, P, a2, a3, e2, e3, q2)
    dCLV = set_dCLV(R, C, P, a1, a3, e1, q1)
    dRLVP = set_dRLVPart(R, P, r, K, a2)
    dPLVP = set_dPredLV(R, P, a2, e2, q2)
    dRLVC = set_dRLVPart(R, C, r, K, a1)
    dCLVC = set_dPredLV(R, C, a1, e1, q1)
    dxLVa, dyLVa, dzLVa = set_LVAdim(R, C, P, r, K, a1, a2, a3, e1, e2, e3, q1, q2)
    # Construct RM functions
    dRRM = set_dRRM(R, C, P, r, K, a1, a2, a3, t_hp, t_hc, m_C, m_P)
    dCRM = set_dCRM(R, C, P, a1, a2, a3, e1, t_hc, t_hp, q1, m_C, m_P)
    dPRM = set_dPRM(R, C, P, a2, a3, e2, e3, t_hp, q2, m_P)
    # RM equilibrium expressions
    CNum_eq_RM = setEqCNum_RM(q2, m_P, a2, R, e2, q20, hP0)
    CDen_eq_RM = setEqCDen_RM(e3, q20, hP0)
    PNum_eq_RM = setEqPNum_RM(K, q1, q2, r, a1, a2, a3, e1, e2, e3, t_hc, t_hp, R, C, P, m_P, m_C, q20, q10, hC0, hP0)
    PDen_eq_RM = setEqPDen_RM(K, q1, q2, r, a1, a2, a3, e1, e2, e3, t_hc, t_hp, R, C, P, m_P, m_C, q20, q10, hC0, hP0)
    C_eq_RM = CNum_eq_RM / CDen_eq_RM
    P_eq_RM = PNum_eq_RM / PDen_eq_RM
    # Isoclines
    RIsoLVa, CIsoLVa, PIsoLVa = set_IsoclinesLVAdim(R, C, P, r, K, a1, a2, a3, e1, e2, e3, q1, q2)
    DynamicsDict = {'dRLV': dRLV, 'dPLV': dPLV, 'dCLV': dCLV,
                    'dRRM': dRRM, 'dPRM': dPRM, 'dCRM': dCRM,
                    'C_eq_RM': C_eq_RM, 'P_eq_RM': P_eq_RM,
                    'PNum_eq_RM': PNum_eq_RM, 'CNum_eq_RM': CNum_eq_RM,
                    'dRLVP': dRLVP, 'dPLVP': dPLVP, 'dRLVC': dRLVC, 'dCLVC': dCLVC,
                    'EigR': -r,
                    'dxLVa': dxLVa, 'dyLVa': dyLVa, 'dzLVa': dzLVa,
                    'RIsoLVa': RIsoLVa, 'CIsoLVa': CIsoLVa, 'PIsoLVa': PIsoLVa}
    return DynamicsDict
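# End-to-end usage sketch: `build_model` is a hypothetical helper (the name,
# and the assumption that `params` carries every key read above, are mine);
# it simply chains the constructors defined in this module.
def build_model(params, K_RC, K_CP, m_P):
    from sympy import symbols
    R, C, P = symbols('R C P', positive=True)  # assumed state symbols
    par_dict = construct_param_dict(params, K_RC, K_CP, m_P)
    eq_dict = construct_equilibrium(params, par_dict, K_RC, K_CP, m_P)
    inv_dict = construct_inv_boundaries(params, par_dict, eq_dict, K_RC, K_CP, m_P)
    dyn_dict = ConstructDynamicalFunctions(params, par_dict, K_RC, K_CP, m_P, R, C, P)
    jac_dict = setJacobianDict(dyn_dict, R, C, P)
    return par_dict, eq_dict, inv_dict, dyn_dict, jac_dict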
"step-4": "from coarsegrainparams import *\nfrom inva_fcl_stab import *\nfrom Eq import *\nfrom Dynamics import *\nfrom sympy import Matrix, sqrt\n\n\ndef construct_param_dict(params, K_RC, K_CP, m_P):\n \"\"\"\n Construct all the parameters from its relationships with body size and temperature, using the normalizing constants and scaling exponent w\n \"\"\"\n w = params['w']\n pd = params['pd']\n pv = params['pv']\n Er = params['Er']\n Ek = params['Ek']\n ER = params['ER']\n EC = params['EC']\n EP = params['EP']\n Eq1 = params['Eq1']\n Eq2 = params['Eq2']\n a = params['a']\n b = params['b']\n c = params['c']\n formC = params['formC']\n formPC = params['formPC']\n formPR = params['formPR']\n TR = params['TR']\n TC = params['TC']\n TP = params['TP']\n D_R = params['D_R']\n D_C = params['D_C']\n K_RP = K_RC * K_CP\n fmC = params['fmC']\n thermyR = params['thermyR']\n thermyC = params['thermyC']\n thermyP = params['thermyP']\n fmPR = params['fmPR']\n fmPC = params['fmPC']\n m_C = K_CP * m_P\n m_R = K_RP * m_P\n r0 = params['r0']\n k0 = params['k0']\n a01 = a02 = params['a012']\n a03 = params['a03']\n d0 = params['d0']\n q10 = params['q10']\n q20 = params['q20']\n v0R = params['v0R']\n v0C = params['v0C']\n v0P = params['v0P']\n k = b_k\n hC0 = params['hC0']\n hP0 = params['hP0']\n q1 = set_q1(q10, m_C, w, Eq1, TR, k)\n q2 = set_q2(q20, m_P, w, Eq2, TC, k)\n K = set_K(k0, m_R, w, Ek, TR, k)\n r = set_r(r0, m_R, w, Er, TR, k)\n a1 = set_alfa(m_C, a01, K_RC, pv, pd, TR, TC, ER, EC, D_R, v0R, v0C, g,\n alfa, fmC, thermyR, thermyC, k, a, b, c, formC)\n a2 = set_alfa(m_P, a02, K_RP, pv, pd, TR, TP, ER, EP, D_R, v0R, v0P, g,\n alfa, fmPR, thermyR, thermyP, k, a, b, c, formPR)\n a3 = set_alfa(m_P, a03, K_CP, pv, pd, TC, TP, EC, EP, D_C, v0C, v0P, g,\n alfa, fmPC, thermyC, thermyP, k, a, b, c, formPC)\n t_hp = set_th(hP0, m_P, w, EP, k, TP)\n t_hc = set_th(hC0, m_C, w, EC, k, TC)\n param_dict = {'q1': q1, 'q2': q2, 'K': K, 'r': r, 'a1': a1, 'a2': a2,\n 'a3': a3, 't_hp': t_hp, 't_hc': t_hc}\n return param_dict\n\n\ndef construct_equilibrium(params, par_dict, K_RC, K_CP, m_P):\n \"\"\"\n Construct all the functions related to the computation of equilibrium values in the model, in any subsytem\n \"\"\"\n q1 = par_dict['q1']\n q2 = par_dict['q2']\n q1_0 = params['q10']\n q20 = params['q20']\n hC0 = params['hC0']\n hP0 = params['hP0']\n K = par_dict['K']\n r = par_dict['r']\n m_C = K_CP * m_P\n a1 = par_dict['a1']\n a2 = par_dict['a2']\n a3 = par_dict['a3']\n t_hc = par_dict['t_hc']\n t_hp = par_dict['t_hp']\n e1 = params['e1']\n e2 = params['e2']\n e3 = params['e3']\n R_eq_s2, C_eq_s2 = set_R_C_eq_sLV(r, K, q1, a1, e1)\n R_eq_s2RM, C_eq_s2RM = set_R_C_eq_sRM(r, K, q1, q1_0, a1, e1, hC0)\n R_eq_s3, P_eq_s3 = set_R_C_eq_sLV(r, K, q2, a2, e2)\n R_eq_s3RM, P_eq_s3RM = set_R_C_eq_sRM(r, K, q2, q20, a2, e2, hP0)\n R_eq = set_R_eq(K, q1, q2, r, a1, a2, a3, e1, e2, e3)\n C_eq = set_C_eq(K, q1, q2, r, a1, a2, a3, e1, e2, e3)\n P_eq = set_P_eq(K, q1, q2, r, a1, a2, a3, e1, e2, e3)\n D = setD(K, a1, a2, a3, e1, e2, e3, r)\n DBound = setDBound(K, a1, a2, a3, e1, e2, e3, m_C, r)\n R1 = setRoot1(K, q1, q2, r, a1, a2, a3, e1, e2, e3, t_hc, t_hp, m_P,\n m_C, q20, q1_0, hC0, hP0)\n Dis = setDis(K, q1, q2, r, a1, a2, a3, e1, e2, e3, t_hc, t_hp, m_P, m_C,\n q20, q1_0, hC0, hP0)\n bR = setb_R(K, q1, q2, r, a1, a2, a3, e1, e2, e3, t_hc, t_hp, m_P, m_C,\n q20, q1_0, hC0, hP0)\n denR = setden_R(K, q1, q2, r, a1, a2, a3, e1, e2, e3, t_hc, t_hp, m_P,\n m_C, q20, q1_0, hC0, hP0)\n R2 = (bR + sqrt(Dis)) / (2 * denR)\n R3 = (bR - 
sqrt(Dis)) / (2 * denR)\n eq_dict = {'R_eq_s2': R_eq_s2, 'C_eq_s2': C_eq_s2, 'R_eq_s3': R_eq_s3,\n 'P_eq_s3': P_eq_s3, 'R_eq': R_eq, 'C_eq': C_eq, 'P_eq': P_eq,\n 'R_eq_s2RM': R_eq_s2RM, 'C_eq_s2RM': C_eq_s2RM, 'R_eq_s3RM':\n R_eq_s3RM, 'P_eq_s3RM': P_eq_s3RM, 'R1': R1, 'Discriminant': Dis,\n 'R2': R2, 'R3': R3, 'bR': bR, 'denR': denR, 'D': D, 'DBound': DBound}\n return eq_dict\n\n\ndef construct_inv_boundaries(params, par_dict, eq_dict, K_RC, K_CP, m_P):\n \"\"\"\n Construct in sympy format all the functions related to the invasibility conditions in each of the explored scenarios\n \"\"\"\n q1 = par_dict['q1']\n q2 = par_dict['q2']\n K = par_dict['K']\n m_C = K_CP * m_P\n q10 = params['q10']\n q20 = params['q20']\n hC0 = params['hC0']\n hP0 = params['hP0']\n a1 = par_dict['a1']\n a2 = par_dict['a2']\n a3 = par_dict['a3']\n e1 = params['e1']\n e2 = params['e2']\n e3 = params['e3']\n t_hc = par_dict['t_hc']\n t_hp = par_dict['t_hp']\n R_eq_s2 = eq_dict['R_eq_s2']\n C_eq_s2 = eq_dict['C_eq_s2']\n P_eq_s3 = eq_dict['P_eq_s3']\n R_eq_s3 = eq_dict['R_eq_s3']\n R_eq_s2RM = eq_dict['R_eq_s2RM']\n C_eq_s2RM = eq_dict['C_eq_s2RM']\n R_eq_s3RM = eq_dict['R_eq_s3RM']\n P_eq_s3RM = eq_dict['P_eq_s3RM']\n I_C_s2 = set_I_C_s2(e1, a1, K, q1)\n I_P_s3 = set_I_P_s3(e2, a2, K, q2)\n I_P_s4 = set_I_P_s4(e2, e3, a2, a3, q2, R_eq_s2, C_eq_s2)\n I_C_s5 = set_I_C_s5(e1, a1, a3, R_eq_s3, P_eq_s3, q1)\n I_C_s2RM = set_I_C_s2RM(e1, a1, K, q1, hC0, q10)\n I_P_s3RM = set_I_P_s3RM(e2, a2, K, q2, hP0, q20)\n I_P_s4RM = set_I_P_s4RM(e2, e3, a2, a3, q2, R_eq_s2RM, C_eq_s2RM, hP0, q20)\n I_C_s5RM = set_I_C_s5RM(e1, e2, a1, a3, m_C, R_eq_s3RM, P_eq_s3RM, q1,\n t_hc, q10, q20, hP0, hC0)\n inv_dict = {'I_C_s2': I_C_s2, 'I_P_s3': I_P_s3, 'I_P_s4': I_P_s4,\n 'I_C_s5': I_C_s5, 'I_C_s2RM': I_C_s2RM, 'I_P_s3RM': I_P_s3RM,\n 'I_P_s4RM': I_P_s4RM, 'I_C_s5RM': I_C_s5RM}\n return inv_dict\n\n\ndef Trophic_position(params, par_dict, eq_dict):\n R_eq = eq_dict['R_eq']\n a2 = par_dict['a2']\n q2 = par_dict['q2']\n e2 = params['e2']\n MTP_C = set_MTP_C(R_eq, a2, q2, e2)\n return MTP_C\n\n\ndef Stability(params, par_dict, eq_dict, K_RC, K_CP, m_P):\n K = par_dict['K']\n r = par_dict['r']\n m_C = K_CP * m_P\n a1 = par_dict['a1']\n a2 = par_dict['a2']\n a3 = par_dict['a3']\n e1 = params['e1']\n e2 = params['e2']\n e3 = params['e3']\n R_eq = eq_dict['R_eq']\n C_eq = eq_dict['C_eq']\n P_eq = eq_dict['P_eq']\n D = set_D(K, a1, a2, a3, e1, e2, e3, r)\n d1 = set_d1(r, R_eq, K)\n d2 = set_d2(e1, e2, e3, a1, a2, a3, C_eq, R_eq, P_eq)\n d3 = set_d3(D, a3, C_eq, R_eq, P_eq, K)\n hd2 = set_hdet2(d1, d2, d3)\n return hd2\n\n\ndef Jacobian(dR, dC, dP, R, C, P):\n X = Matrix([dR, dC, dP])\n Y = Matrix([R, C, P])\n return X.jacobian(Y)\n\n\ndef Jacobian2(dX, dY, X, Y):\n A = Matrix([dX, dY])\n B = Matrix([X, Y])\n return A.jacobian(B)\n\n\ndef setJacobianDict(DynamicsDict, R, C, P):\n dRLV = DynamicsDict['dxLVa']\n dCLV = DynamicsDict['dyLVa']\n dPLV = DynamicsDict['dzLVa']\n dRRM = DynamicsDict['dRRM']\n dCRM = DynamicsDict['dCRM']\n dPRM = DynamicsDict['dPRM']\n dRLVP = DynamicsDict['dRLVP']\n dRLVC = DynamicsDict['dRLVC']\n dPLVP = DynamicsDict['dPLVP']\n dCLVC = DynamicsDict['dCLVC']\n JLV = Jacobian(dRLV, dCLV, dPLV, R, C, P)\n JRM = Jacobian(dRRM, dCRM, dPRM, R, C, P)\n JLVP = Jacobian2(dRLVP, dPLVP, R, P)\n JLVC = Jacobian2(dRLVC, dCLVC, R, C)\n return {'JLV': JLV, 'JRM': JRM, 'JLVP': JLVP, 'JLVC': JLVC}\n\n\ndef ConstructDynamicalFunctions(params, par_dict, K_RC, K_CP, m_P, R, C, P):\n q1 = par_dict['q1']\n q2 = par_dict['q2']\n K = 
par_dict['K']\n r = par_dict['r']\n e1 = params['e1']\n e2 = params['e2']\n e3 = params['e3']\n m_C = K_CP * m_P\n q20 = params['q20']\n q10 = params['q10']\n a1 = par_dict['a1']\n a2 = par_dict['a2']\n a3 = par_dict['a3']\n t_hp = par_dict['t_hp']\n t_hc = par_dict['t_hc']\n hC0 = params['hC0']\n hP0 = params['hP0']\n dRLV = set_dRLV(R, C, P, r, K, a1, a2)\n dPLV = set_dPLV(R, C, P, a2, a3, e2, e3, q2)\n dCLV = set_dCLV(R, C, P, a1, a3, e1, q1)\n dRLVP = set_dRLVPart(R, P, r, K, a2)\n dPLVP = set_dPredLV(R, P, a2, e2, q2)\n dRLVC = set_dRLVPart(R, C, r, K, a1)\n dCLVC = set_dPredLV(R, C, a1, e1, q1)\n dxLVa, dyLVa, dzLVa = set_LVAdim(R, C, P, r, K, a1, a2, a3, e1, e2, e3,\n q1, q2)\n dRRM = set_dRRM(R, C, P, r, K, a1, a2, a3, t_hp, t_hc, m_C, m_P)\n dCRM = set_dCRM(R, C, P, a1, a2, a3, e1, t_hc, t_hp, q1, m_C, m_P)\n dPRM = set_dPRM(R, C, P, a2, a3, e2, e3, t_hp, q2, m_P)\n CNum_eq_RM = setEqCNum_RM(q2, m_P, a2, R, e2, q20, hP0)\n CDen_eq_RM = setEqCDen_RM(e3, q20, hP0)\n PNum_eq_RM = setEqPNum_RM(K, q1, q2, r, a1, a2, a3, e1, e2, e3, t_hc,\n t_hp, R, C, P, m_P, m_C, q20, q10, hC0, hP0)\n PDen_eq_RM = setEqPDen_RM(K, q1, q2, r, a1, a2, a3, e1, e2, e3, t_hc,\n t_hp, R, C, P, m_P, m_C, q20, q10, hC0, hP0)\n C_eq_RM = CNum_eq_RM / CDen_eq_RM\n P_eq_RM = PNum_eq_RM / PDen_eq_RM\n RIsoLVa, CIsoLVa, PIsoLVa = set_IsoclinesLVAdim(R, C, P, r, K, a1, a2,\n a3, e1, e2, e3, q1, q2)\n DynamicsDict = {'dRLV': dRLV, 'dPLV': dPLV, 'dCLV': dCLV, 'dRRM': dRRM,\n 'dPRM': dPRM, 'dCRM': dCRM, 'C_eq_RM': C_eq_RM, 'P_eq_RM': P_eq_RM,\n 'PNum_eq_RM': PNum_eq_RM, 'CNum_eq_RM': CNum_eq_RM, 'dRLVP': dRLVP,\n 'dPLVP': dPLVP, 'dRLVC': dRLVC, 'dCLVC': dCLVC, 'EigR': -r, 'dxLVa':\n dxLVa, 'dyLVa': dyLVa, 'dzLVa': dzLVa, 'RIsoLVa': RIsoLVa,\n 'CIsoLVa': CIsoLVa, 'PIsoLVa': PIsoLVa}\n return DynamicsDict\n",
"step-5": "from coarsegrainparams import *\nfrom inva_fcl_stab import *\nfrom Eq import *\nfrom Dynamics import *\nfrom sympy import Matrix,sqrt\n\ndef construct_param_dict(params,K_RC,K_CP,m_P):\n \"\"\"\n Construct all the parameters from its relationships with body size and temperature, using the normalizing constants and scaling exponent w\n \"\"\"\n ###scaling constants\n w=params['w']\n pd=params['pd'] # in 3D and 0.21 in 2D\n pv=params['pv']\n Er=params['Er'] ;Ek=params['Ek']\n ER=params['ER'];EC=params['EC'];EP=params['EP'];\n Eq1=params['Eq1'];Eq2=params['Eq2']\n\n\n #capture success function\n a = params['a']\n b = params['b']\n c = params['c']\n formC = params['formC']\n formPC = params['formPC']\n formPR = params['formPR']\n \n ###variables\n TR= params['TR'] ;TC= params['TC'];TP=params['TP'];D_R= params['D_R']; D_C= params['D_C']\n K_RP=K_RC*K_CP\n fmC=params['fmC'];thermyR=params['thermyR']\n thermyC=params['thermyC'];thermyP=params['thermyP']\n fmPR=params['fmPR']\n fmPC=params['fmPC']\n m_C = K_CP*m_P;m_R = K_RP*m_P\n ###normalization constants and boltzmann constant\n r0 = params['r0']\n k0 = params['k0'] # will depend on the productivity of the habitat\n a01 = a02 = params['a012'] # will depedend on the dimension of the habitat \n a03 = params['a03']\n d0= params['d0']\n q10 = params['q10'];q20 = params['q20'];\n v0R = params['v0R'];v0C =params['v0C'];v0P =params['v0P'];k = b_k\n hC0 = params['hC0'];hP0 = params['hP0'] \n \n #intrapopulation parameters\n q1=set_q1(q10,m_C,w,Eq1,TR,k)\n q2=set_q2(q20,m_P,w,Eq2,TC,k)\n K=set_K(k0,m_R,w,Ek,TR,k)\n r=set_r(r0,m_R,w,Er,TR,k)\n\n #interpopulation parameters\n a1=set_alfa(m_C,a01,K_RC,pv,pd,TR,TC,ER,EC,D_R,v0R,v0C,g,alfa,fmC,thermyR,thermyC,k,a,b,c,formC)\n a2=set_alfa(m_P,a02,K_RP,pv,pd,TR,TP,ER,EP,D_R,v0R,v0P,g,alfa,fmPR,thermyR,thermyP,k,a,b,c,formPR)\n a3=set_alfa(m_P,a03,K_CP,pv,pd,TC,TP,EC,EP,D_C,v0C,v0P,g,alfa,fmPC,thermyC,thermyP,k,a,b,c,formPC)\n\n t_hp = set_th(hP0,m_P,w,EP,k,TP)\n t_hc = set_th(hC0,m_C,w,EC,k,TC)\n param_dict={'q1':q1,'q2':q2,'K':K,'r':r,'a1':a1,'a2':a2,'a3':a3,'t_hp':t_hp,'t_hc':t_hc}\n \n return param_dict\n\ndef construct_equilibrium(params,par_dict,K_RC,K_CP,m_P):\n \"\"\"\n Construct all the functions related to the computation of equilibrium values in the model, in any subsytem\n \"\"\"\n #intrapopulation parameters\n q1=par_dict['q1']\n q2=par_dict['q2']\n q1_0 = params['q10']\n q20 = params['q20']\n hC0 = params['hC0']\n hP0 = params['hP0']\n \n K=par_dict['K']\n r=par_dict['r']\n \n m_C = K_CP*m_P\n\n #interpopulation parameters\n a1=par_dict['a1']\n a2=par_dict['a2']\n a3=par_dict['a3']\n t_hc = par_dict['t_hc']\n t_hp = par_dict['t_hp']\n e1=params['e1']\n e2=params['e2']\n e3=params['e3']\n \n\n # Equilibrium values\n ##Sc2\n ###L-V\n R_eq_s2 , C_eq_s2 = set_R_C_eq_sLV(r,K,q1,a1,e1)\n ###R-M\n R_eq_s2RM, C_eq_s2RM = set_R_C_eq_sRM(r,K,q1,q1_0,a1,e1,hC0)\n ##Sc3\n ###L-V\n R_eq_s3,P_eq_s3 = set_R_C_eq_sLV(r,K,q2,a2,e2)\n ###R-M\n R_eq_s3RM , P_eq_s3RM = set_R_C_eq_sRM(r,K,q2,q20,a2,e2,hP0)\n \n\n ###full system ( need to correct this.. 
in case want to use it, focus at the moment in invasibility stuff)\n R_eq = set_R_eq(K,q1,q2,r,a1,a2,a3,e1,e2,e3)\n C_eq = set_C_eq(K,q1,q2,r,a1,a2,a3,e1,e2,e3)\n P_eq = set_P_eq(K,q1,q2,r,a1,a2,a3,e1,e2,e3)\n \n D = setD(K,a1,a2,a3,e1,e2,e3,r)\n DBound= setDBound(K,a1,a2,a3,e1,e2,e3,m_C,r)\n\n #Roots for Req\n R1 = setRoot1(K,q1,q2,r,a1,a2,a3,e1,e2,e3,t_hc,t_hp,m_P,m_C,q20,q1_0,hC0,hP0)\n Dis = setDis(K,q1,q2,r,a1,a2,a3,e1,e2,e3,t_hc,t_hp,m_P,m_C,q20,q1_0,hC0,hP0)\n bR = setb_R(K,q1,q2,r,a1,a2,a3,e1,e2,e3,t_hc,t_hp,m_P,m_C,q20,q1_0,hC0,hP0)\n denR = setden_R(K,q1,q2,r,a1,a2,a3,e1,e2,e3,t_hc,t_hp,m_P,m_C,q20,q1_0,hC0,hP0)\n\n R2 = (bR + sqrt(Dis))/(2*denR)\n R3 = (bR - sqrt(Dis))/(2*denR)\n \n eq_dict={'R_eq_s2':R_eq_s2,'C_eq_s2':C_eq_s2,'R_eq_s3':R_eq_s3,'P_eq_s3':P_eq_s3,'R_eq':R_eq,'C_eq':C_eq,'P_eq':P_eq,\n 'R_eq_s2RM':R_eq_s2RM,'C_eq_s2RM':C_eq_s2RM,'R_eq_s3RM':R_eq_s3RM,'P_eq_s3RM':P_eq_s3RM,'R1':R1,'Discriminant':Dis,'R2':R2,'R3':R3,'bR':bR,'denR':denR,'D' : D,'DBound':DBound}\n return eq_dict\n\n\n\ndef construct_inv_boundaries(params,par_dict,eq_dict,K_RC,K_CP,m_P):\n \"\"\"\n Construct in sympy format all the functions related to the invasibility conditions in each of the explored scenarios\n \"\"\"\n #intrapop params\n q1=par_dict['q1']\n q2=par_dict['q2']\n K =par_dict['K']\n m_C= K_CP*m_P\n q10 = params['q10']\n q20 = params['q20']\n hC0 = params['hC0']\n hP0 = params['hP0']\n\n #interpop params\n a1=par_dict['a1']\n a2=par_dict['a2']\n a3=par_dict['a3']\n e1=params['e1']\n e2=params['e2']\n e3=params['e3']\n \n\n t_hc = par_dict['t_hc']\n t_hp = par_dict['t_hp']\n\n #eq values\n\n #L-V\n R_eq_s2 = eq_dict['R_eq_s2']\n C_eq_s2 = eq_dict['C_eq_s2']\n P_eq_s3 = eq_dict['P_eq_s3']\n R_eq_s3 = eq_dict['R_eq_s3']\n #R-M\n R_eq_s2RM = eq_dict['R_eq_s2RM']\n C_eq_s2RM = eq_dict['C_eq_s2RM']\n R_eq_s3RM = eq_dict['R_eq_s3RM']\n P_eq_s3RM = eq_dict['P_eq_s3RM']\n \n ##Invasibility boundaries\n\n #L-V\n I_C_s2 = set_I_C_s2(e1,a1,K,q1)\n I_P_s3 = set_I_P_s3(e2,a2,K,q2)\n I_P_s4 = set_I_P_s4(e2,e3,a2,a3,q2,R_eq_s2,C_eq_s2)\n I_C_s5 = set_I_C_s5(e1,a1,a3,R_eq_s3,P_eq_s3,q1)\n \n #R-M\n I_C_s2RM = set_I_C_s2RM(e1,a1,K,q1,hC0,q10)\n I_P_s3RM = set_I_P_s3RM(e2,a2,K,q2,hP0,q20)\n I_P_s4RM = set_I_P_s4RM(e2,e3,a2,a3,q2,R_eq_s2RM,C_eq_s2RM,hP0,q20)\n I_C_s5RM = set_I_C_s5RM(e1,e2,a1,a3,m_C,R_eq_s3RM,P_eq_s3RM,q1,t_hc,q10,q20,hP0,hC0) \n\n inv_dict= {'I_C_s2':I_C_s2,'I_P_s3':I_P_s3,'I_P_s4':I_P_s4,'I_C_s5':I_C_s5,\n 'I_C_s2RM':I_C_s2RM,'I_P_s3RM':I_P_s3RM,'I_P_s4RM':I_P_s4RM,'I_C_s5RM':I_C_s5RM}\n\n return inv_dict\n\ndef Trophic_position(params,par_dict,eq_dict):\n \n R_eq = eq_dict['R_eq']\n a2 = par_dict['a2']\n q2 = par_dict['q2']\n e2 = params['e2']\n \n #Trophic position in the coexistence domain\n MTP_C= set_MTP_C(R_eq,a2,q2,e2)\n return MTP_C\n \ndef Stability(params,par_dict,eq_dict,K_RC,K_CP,m_P):\n #intrapop params\n K=par_dict['K']\n r=par_dict['r']\n m_C = K_CP*m_P\n #interpop params\n a1=par_dict['a1']\n a2=par_dict['a2']\n a3=par_dict['a3']\n e1=params['e1']\n e2=params['e2']\n e3=params['e3']\n #equilibrium\n R_eq= eq_dict['R_eq']\n C_eq = eq_dict['C_eq']\n P_eq = eq_dict['P_eq']\n \n \n ##Stability\n D = set_D(K,a1,a2,a3,e1,e2,e3,r)\n d1 = set_d1(r,R_eq,K)\n d2 = set_d2(e1,e2,e3,a1,a2,a3,C_eq,R_eq,P_eq)\n d3 = set_d3(D,a3,C_eq,R_eq,P_eq,K)\n hd2 = set_hdet2(d1,d2,d3)\n\n return hd2\n\ndef Jacobian(dR,dC,dP,R,C,P):\n X = Matrix([dR,dC,dP])\n Y = Matrix([R,C,P])\n return X.jacobian(Y)\n\ndef Jacobian2(dX,dY,X,Y):\n A = Matrix([dX,dY])\n B = Matrix([X,Y])\n return 
A.jacobian(B)\n\ndef setJacobianDict(DynamicsDict,R,C,P):\n dRLV = DynamicsDict['dxLVa']\n dCLV = DynamicsDict['dyLVa']\n dPLV = DynamicsDict['dzLVa']\n dRRM = DynamicsDict['dRRM']\n dCRM = DynamicsDict['dCRM']\n dPRM = DynamicsDict['dPRM']\n \n dRLVP = DynamicsDict['dRLVP']\n dRLVC = DynamicsDict['dRLVC']\n dPLVP = DynamicsDict['dPLVP']\n dCLVC = DynamicsDict['dCLVC']\n \n JLV = Jacobian(dRLV,dCLV,dPLV,R,C,P)\n JRM = Jacobian(dRRM,dCRM,dPRM,R,C,P)\n JLVP = Jacobian2(dRLVP,dPLVP,R,P)\n JLVC = Jacobian2(dRLVC,dCLVC,R,C)\n \n return {'JLV':JLV,'JRM':JRM,'JLVP':JLVP,'JLVC':JLVC}\n\ndef ConstructDynamicalFunctions(params,par_dict,K_RC,K_CP,m_P,R,C,P):\n #intrapopulation parameters\n q1=par_dict['q1']\n q2=par_dict['q2']\n K=par_dict['K']\n r=par_dict['r']\n e1=params['e1']\n e2=params['e2']\n e3=params['e3']\n m_C = K_CP*m_P\n q20 = params['q20']\n q10 = params['q10']\n #interpopulation parameters\n a1=par_dict['a1']\n a2=par_dict['a2']\n a3=par_dict['a3']\n t_hp=par_dict['t_hp']\n t_hc=par_dict['t_hc']\n hC0=params['hC0']\n hP0=params['hP0']\n \n #Construct LV functions\n dRLV=set_dRLV(R,C,P,r,K,a1,a2)\n dPLV=set_dPLV(R,C,P,a2,a3,e2,e3,q2)\n dCLV=set_dCLV(R,C,P,a1,a3,e1,q1)\n\n dRLVP = set_dRLVPart(R,P,r,K,a2)\n dPLVP = set_dPredLV(R,P,a2,e2,q2)\n\n dRLVC = set_dRLVPart(R,C,r,K,a1)\n dCLVC = set_dPredLV(R,C,a1,e1,q1)\n\n dxLVa,dyLVa,dzLVa = set_LVAdim(R,C,P,r,K,a1,a2,a3,e1,e2,e3,q1,q2)\n\n \n #Construct RM functions\n dRRM = set_dRRM(R,C,P,r,K,a1,a2,a3,t_hp,t_hc,m_C,m_P)\n dCRM = set_dCRM(R,C,P,a1,a2,a3,e1,t_hc,t_hp,q1,m_C,m_P)\n dPRM = set_dPRM(R,C,P,a2,a3,e2,e3,t_hp,q2,m_P) \n\n #RM eq expresions\n\n CNum_eq_RM = setEqCNum_RM(q2,m_P,a2,R,e2,q20,hP0)\n CDen_eq_RM = setEqCDen_RM(e3,q20,hP0)\n PNum_eq_RM = setEqPNum_RM(K,q1,q2,r,a1,a2,a3,e1,e2,e3,t_hc,t_hp,R,C,P,m_P,m_C,q20,q10,hC0,hP0)\n PDen_eq_RM = setEqPDen_RM(K,q1,q2,r,a1,a2,a3,e1,e2,e3,t_hc,t_hp,R,C,P,m_P,m_C,q20,q10,hC0,hP0)\n\n\n C_eq_RM = CNum_eq_RM/CDen_eq_RM\n P_eq_RM = PNum_eq_RM/PDen_eq_RM\n\n\n #Isoclines\n\n RIsoLVa,CIsoLVa,PIsoLVa = set_IsoclinesLVAdim(R,C,P,r,K,a1,a2,a3,e1,e2,e3,q1,q2)\n\n DynamicsDict={'dRLV':dRLV,'dPLV':dPLV,'dCLV':dCLV,'dRRM':dRRM,'dPRM':dPRM,'dCRM':dCRM,'C_eq_RM':C_eq_RM,'P_eq_RM':P_eq_RM,'PNum_eq_RM':PNum_eq_RM,'CNum_eq_RM':CNum_eq_RM,'dRLVP':dRLVP,'dPLVP':dPLVP,'dRLVC':dRLVC,'dCLVC':dCLVC,'EigR':-r,'dxLVa':dxLVa,'dyLVa':dyLVa,'dzLVa':dzLVa,'RIsoLVa':RIsoLVa,'CIsoLVa':CIsoLVa,'PIsoLVa':PIsoLVa}\n return DynamicsDict\n\n \n\n\n \n\n \n \n",
"step-ids": [
5,
8,
9,
10,
11
]
}
|
[
5,
8,
9,
10,
11
] |
import sys
import os
PROJ_DIR = os.path.dirname(os.path.dirname(__file__))
sys.path.append(PROJ_DIR)
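# PROJ_DIR resolves to the parent of this file's directory; appending it to
# sys.path lets modules that live at the project root be imported from here.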
|
normal
|
{
"blob_id": "54276074d84e63e6418f8738bb7f910424f1c94d",
"index": 9469,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsys.path.append(PROJ_DIR)\n",
"step-3": "<mask token>\nPROJ_DIR = os.path.dirname(os.path.dirname(__file__))\nsys.path.append(PROJ_DIR)\n",
"step-4": "import sys\nimport os\nPROJ_DIR = os.path.dirname(os.path.dirname(__file__))\nsys.path.append(PROJ_DIR)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def _create_event_statement(event_name):
"""Return a SQL statement to create a Event vertex."""
field_name_to_value = {'name': event_name, 'event_date':
get_random_date(), 'uuid': get_uuid()}
return create_vertex_statement('Event', field_name_to_value)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def _create_event_statement(event_name):
"""Return a SQL statement to create a Event vertex."""
field_name_to_value = {'name': event_name, 'event_date':
get_random_date(), 'uuid': get_uuid()}
return create_vertex_statement('Event', field_name_to_value)
def get_event_generation_commands():
"""Return a list of SQL statements to create all event vertices."""
command_list = []
for event_name in EVENT_NAMES_LIST:
command_list.append(_create_event_statement(event_name))
return command_list
<|reserved_special_token_1|>
<|reserved_special_token_0|>
EVENT_NAMES_LIST = 'Birthday', 'Bar Mitzvah', 'Coronation', 'Re-awakening'
def _create_event_statement(event_name):
"""Return a SQL statement to create a Event vertex."""
field_name_to_value = {'name': event_name, 'event_date':
get_random_date(), 'uuid': get_uuid()}
return create_vertex_statement('Event', field_name_to_value)
def get_event_generation_commands():
"""Return a list of SQL statements to create all event vertices."""
command_list = []
for event_name in EVENT_NAMES_LIST:
command_list.append(_create_event_statement(event_name))
return command_list
<|reserved_special_token_1|>
from .utils import create_vertex_statement, get_random_date, get_uuid
EVENT_NAMES_LIST = 'Birthday', 'Bar Mitzvah', 'Coronation', 'Re-awakening'
def _create_event_statement(event_name):
"""Return a SQL statement to create a Event vertex."""
field_name_to_value = {'name': event_name, 'event_date':
get_random_date(), 'uuid': get_uuid()}
return create_vertex_statement('Event', field_name_to_value)
def get_event_generation_commands():
"""Return a list of SQL statements to create all event vertices."""
command_list = []
for event_name in EVENT_NAMES_LIST:
command_list.append(_create_event_statement(event_name))
return command_list
<|reserved_special_token_1|>
# Copyright 2018-present Kensho Technologies, LLC.
from .utils import create_vertex_statement, get_random_date, get_uuid
EVENT_NAMES_LIST = (
"Birthday",
"Bar Mitzvah",
"Coronation",
"Re-awakening",
)
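# get_random_date() and get_uuid() come from .utils above, so each generated
# statement inserts one Event vertex keyed by a fresh uuid with a randomized
# event_date.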
def _create_event_statement(event_name):
"""Return a SQL statement to create a Event vertex."""
field_name_to_value = {'name': event_name, 'event_date': get_random_date(), 'uuid': get_uuid()}
return create_vertex_statement('Event', field_name_to_value)
def get_event_generation_commands():
"""Return a list of SQL statements to create all event vertices."""
command_list = []
for event_name in EVENT_NAMES_LIST:
command_list.append(_create_event_statement(event_name))
return command_list
|
flexible
|
{
"blob_id": "a521befba58aa85c2fcfe6006db4b161123585f1",
"index": 5341,
"step-1": "<mask token>\n\n\ndef _create_event_statement(event_name):\n \"\"\"Return a SQL statement to create a Event vertex.\"\"\"\n field_name_to_value = {'name': event_name, 'event_date':\n get_random_date(), 'uuid': get_uuid()}\n return create_vertex_statement('Event', field_name_to_value)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef _create_event_statement(event_name):\n \"\"\"Return a SQL statement to create a Event vertex.\"\"\"\n field_name_to_value = {'name': event_name, 'event_date':\n get_random_date(), 'uuid': get_uuid()}\n return create_vertex_statement('Event', field_name_to_value)\n\n\ndef get_event_generation_commands():\n \"\"\"Return a list of SQL statements to create all event vertices.\"\"\"\n command_list = []\n for event_name in EVENT_NAMES_LIST:\n command_list.append(_create_event_statement(event_name))\n return command_list\n",
"step-3": "<mask token>\nEVENT_NAMES_LIST = 'Birthday', 'Bar Mitzvah', 'Coronation', 'Re-awakening'\n\n\ndef _create_event_statement(event_name):\n \"\"\"Return a SQL statement to create a Event vertex.\"\"\"\n field_name_to_value = {'name': event_name, 'event_date':\n get_random_date(), 'uuid': get_uuid()}\n return create_vertex_statement('Event', field_name_to_value)\n\n\ndef get_event_generation_commands():\n \"\"\"Return a list of SQL statements to create all event vertices.\"\"\"\n command_list = []\n for event_name in EVENT_NAMES_LIST:\n command_list.append(_create_event_statement(event_name))\n return command_list\n",
"step-4": "from .utils import create_vertex_statement, get_random_date, get_uuid\nEVENT_NAMES_LIST = 'Birthday', 'Bar Mitzvah', 'Coronation', 'Re-awakening'\n\n\ndef _create_event_statement(event_name):\n \"\"\"Return a SQL statement to create a Event vertex.\"\"\"\n field_name_to_value = {'name': event_name, 'event_date':\n get_random_date(), 'uuid': get_uuid()}\n return create_vertex_statement('Event', field_name_to_value)\n\n\ndef get_event_generation_commands():\n \"\"\"Return a list of SQL statements to create all event vertices.\"\"\"\n command_list = []\n for event_name in EVENT_NAMES_LIST:\n command_list.append(_create_event_statement(event_name))\n return command_list\n",
"step-5": "# Copyright 2018-present Kensho Technologies, LLC.\nfrom .utils import create_vertex_statement, get_random_date, get_uuid\n\n\nEVENT_NAMES_LIST = (\n \"Birthday\",\n \"Bar Mitzvah\",\n \"Coronation\",\n \"Re-awakening\",\n)\n\n\ndef _create_event_statement(event_name):\n \"\"\"Return a SQL statement to create a Event vertex.\"\"\"\n field_name_to_value = {'name': event_name, 'event_date': get_random_date(), 'uuid': get_uuid()}\n return create_vertex_statement('Event', field_name_to_value)\n\n\ndef get_event_generation_commands():\n \"\"\"Return a list of SQL statements to create all event vertices.\"\"\"\n command_list = []\n\n for event_name in EVENT_NAMES_LIST:\n command_list.append(_create_event_statement(event_name))\n\n return command_list\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
def create_app():
app = Flask(__name__)
Bootstrap(app)
return app
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def create_app():
app = Flask(__name__)
Bootstrap(app)
return app
logging.basicConfig(level=logging.DEBUG)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def create_app():
app = Flask(__name__)
Bootstrap(app)
return app
logging.basicConfig(level=logging.DEBUG)
app = create_app()
app.config['WTF_CSRF_ENABLED'] = True
app.config['SECRET_KEY'] = 'you-will-never-guess'
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + os.path.join(os.path
.dirname(os.path.dirname(os.path.realpath(__file__))), 'db',
'micro_scrabble.db')
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
db = SQLAlchemy(app)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
import os, sys
import logging
from flask import Flask
from flask_bootstrap import Bootstrap
from flask_sqlalchemy import SQLAlchemy
def create_app():
app = Flask(__name__)
Bootstrap(app)
return app
logging.basicConfig(level=logging.DEBUG)
app = create_app()
app.config['WTF_CSRF_ENABLED'] = True
app.config['SECRET_KEY'] = 'you-will-never-guess'
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + os.path.join(os.path
.dirname(os.path.dirname(os.path.realpath(__file__))), 'db',
'micro_scrabble.db')
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
db = SQLAlchemy(app)
from . import views
<|reserved_special_token_1|>
import os,sys
import logging
from flask import Flask
from flask_bootstrap import Bootstrap
from flask_sqlalchemy import SQLAlchemy
def create_app():
app = Flask(__name__)
Bootstrap(app)
return app
logging.basicConfig(level=logging.DEBUG)
app = create_app()
app.config['WTF_CSRF_ENABLED'] = True
app.config['SECRET_KEY'] = 'you-will-never-guess'
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))),'db','micro_scrabble.db')
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
db = SQLAlchemy(app)
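# Two dirname() calls up from this file land at the project root, so the URI
# above points at <project_root>/db/micro_scrabble.db. Flask-SQLAlchemy's docs
# suggest setting SQLALCHEMY_TRACK_MODIFICATIONS to False unless the
# modification-tracking event system is actually needed.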
#app.config.from_object('flask_config')
from . import views
|
flexible
|
{
"blob_id": "bd726c86bdecd0b63eb48d056932706d3ecf147d",
"index": 7665,
"step-1": "<mask token>\n\n\ndef create_app():\n app = Flask(__name__)\n Bootstrap(app)\n return app\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef create_app():\n app = Flask(__name__)\n Bootstrap(app)\n return app\n\n\nlogging.basicConfig(level=logging.DEBUG)\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef create_app():\n app = Flask(__name__)\n Bootstrap(app)\n return app\n\n\nlogging.basicConfig(level=logging.DEBUG)\napp = create_app()\napp.config['WTF_CSRF_ENABLED'] = True\napp.config['SECRET_KEY'] = 'you-will-never-guess'\napp.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + os.path.join(os.path\n .dirname(os.path.dirname(os.path.realpath(__file__))), 'db',\n 'micro_scrabble.db')\napp.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True\ndb = SQLAlchemy(app)\n<mask token>\n",
"step-4": "import os, sys\nimport logging\nfrom flask import Flask\nfrom flask_bootstrap import Bootstrap\nfrom flask_sqlalchemy import SQLAlchemy\n\n\ndef create_app():\n app = Flask(__name__)\n Bootstrap(app)\n return app\n\n\nlogging.basicConfig(level=logging.DEBUG)\napp = create_app()\napp.config['WTF_CSRF_ENABLED'] = True\napp.config['SECRET_KEY'] = 'you-will-never-guess'\napp.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + os.path.join(os.path\n .dirname(os.path.dirname(os.path.realpath(__file__))), 'db',\n 'micro_scrabble.db')\napp.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True\ndb = SQLAlchemy(app)\nfrom . import views\n",
"step-5": "import os,sys\nimport logging\nfrom flask import Flask\nfrom flask_bootstrap import Bootstrap\nfrom flask_sqlalchemy import SQLAlchemy\n\ndef create_app():\n app = Flask(__name__)\n Bootstrap(app)\n return app\n\nlogging.basicConfig(level=logging.DEBUG)\napp = create_app()\napp.config['WTF_CSRF_ENABLED'] = True\napp.config['SECRET_KEY'] = 'you-will-never-guess'\napp.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))),'db','micro_scrabble.db')\napp.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True\ndb = SQLAlchemy(app)\n#app.config.from_object('flask_config')\n\nfrom . import views\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
helpMessage = (
"""
**Vocal / Musique**
`{0}join`
Va rejoindre le salon vocale dans laquelle vous êtes.
`{0}leave`
Va partir du salon vocale dans laquelle vous êtes.
`{0}play [YouTube Url]` *ou* `{0}play [musique ou video à rechercher]`
Commencera à jouer l'audio de la vidéo / chanson fournie.
`{0}pause`
Mettra en pause le flux audio actuel.
`{0}resume`
Va reprendre le flux audio actuel.
`{0}stop`
Arrêter et terminer le flux audio.
~~**=========================================**~~
**Administrateur**
`{0}invite`
Envoie un message personnel avec le lien d'invitation du bot. (Ne fonctionnera que pour le propriétaire du bot.)
`{0}shutdown`
Va faire la déconnexion et l'arrêt du bot. (Ne fonctionnera que pour le propriétaire du bot.)
`{0}status [status here]`
Définira le statut de jeu du bot. Ne fonctionnera que pour le propriétaire du bot. (Ne fonctionnera que pour le propriétaire du bot.)
~~**=========================================**~~
**Mini-Games**
`{0}joke`
Postes une blague aléatoire Chuck Norris.
`{0}8ball`
Pose n'importe quelle question à 8-Ball.
`{0}coinflip`
Va retourner une pièce et afficher le résultat.
`{0}roll [# of dice] D[# of sides] Example: !roll 3 D6`
Va lancer les dés spécifiés et poster le résultat.
`{0}slots`
Va poster un résultat de machine à sous.
~~**=========================================**~~
**Random Commandes**
`{0}cat`
Va poster une image de chat aléatoire ou gif.
`{0}catfact (ACTUELLEMENT HORS DE COMMANDE INDISPONIBLE)`
Va poster un fait de chat au hasard.
`{0}catgif`
Va poster un gif de chat aléatoire.
`{0}dog`
Va poster une image de chien aléatoire.
`{0}rabbit`
Va poster une image de lapin aléatoire.
`{0}face`
Poste un visage random depuis une DB de +270 visages
~~**=========================================**~~
**Jeux**
`{0}hots [hotslogs player ID]` - Example: !hots 3141592
Publiera le MMR du joueur pour le match rapide et la ligue des héros.
`{0}gwent [Nom de la Carte]` - Example: !gwent Geralt
Va poster la description de la carte et l'image de la carte gwent. A une longueur de recherche maximale de 10 caractères."""
.format(config.COMMANDPREFIX))
<|reserved_special_token_1|>
from settings import *
helpMessage = (
"""
**Vocal / Musique**
`{0}join`
Va rejoindre le salon vocale dans laquelle vous êtes.
`{0}leave`
Va partir du salon vocale dans laquelle vous êtes.
`{0}play [YouTube Url]` *ou* `{0}play [musique ou video à rechercher]`
Commencera à jouer l'audio de la vidéo / chanson fournie.
`{0}pause`
Mettra en pause le flux audio actuel.
`{0}resume`
Va reprendre le flux audio actuel.
`{0}stop`
Arrêter et terminer le flux audio.
~~**=========================================**~~
**Administrateur**
`{0}invite`
Envoie un message personnel avec le lien d'invitation du bot. (Ne fonctionnera que pour le propriétaire du bot.)
`{0}shutdown`
Va faire la déconnexion et l'arrêt du bot. (Ne fonctionnera que pour le propriétaire du bot.)
`{0}status [status here]`
Définira le statut de jeu du bot. Ne fonctionnera que pour le propriétaire du bot. (Ne fonctionnera que pour le propriétaire du bot.)
~~**=========================================**~~
**Mini-Games**
`{0}joke`
Postes une blague aléatoire Chuck Norris.
`{0}8ball`
Pose n'importe quelle question à 8-Ball.
`{0}coinflip`
Va retourner une pièce et afficher le résultat.
`{0}roll [# of dice] D[# of sides] Example: !roll 3 D6`
Va lancer les dés spécifiés et poster le résultat.
`{0}slots`
Va poster un résultat de machine à sous.
~~**=========================================**~~
**Random Commandes**
`{0}cat`
Va poster une image de chat aléatoire ou gif.
`{0}catfact (ACTUELLEMENT HORS DE COMMANDE INDISPONIBLE)`
Va poster un fait de chat au hasard.
`{0}catgif`
Va poster un gif de chat aléatoire.
`{0}dog`
Va poster une image de chien aléatoire.
`{0}rabbit`
Va poster une image de lapin aléatoire.
`{0}face`
Poste un visage random depuis une DB de +270 visages
~~**=========================================**~~
**Jeux**
`{0}hots [hotslogs player ID]` - Example: !hots 3141592
Publiera le MMR du joueur pour le match rapide et la ligue des héros.
`{0}gwent [Nom de la Carte]` - Example: !gwent Geralt
Va poster la description de la carte et l'image de la carte gwent. A une longueur de recherche maximale de 10 caractères."""
.format(config.COMMANDPREFIX))
<|reserved_special_token_1|>
from settings import *
helpMessage = '''
**Vocal / Musique**
`{0}join`
Va rejoindre le salon vocale dans laquelle vous êtes.
`{0}leave`
Va partir du salon vocale dans laquelle vous êtes.
`{0}play [YouTube Url]` *ou* `{0}play [musique ou video à rechercher]`
Commencera à jouer l'audio de la vidéo / chanson fournie.
`{0}pause`
Mettra en pause le flux audio actuel.
`{0}resume`
Va reprendre le flux audio actuel.
`{0}stop`
Arrêter et terminer le flux audio.
~~**=========================================**~~
**Administrateur**
`{0}invite`
Envoie un message personnel avec le lien d'invitation du bot. (Ne fonctionnera que pour le propriétaire du bot.)
`{0}shutdown`
Va faire la déconnexion et l'arrêt du bot. (Ne fonctionnera que pour le propriétaire du bot.)
`{0}status [status here]`
Définira le statut de jeu du bot. Ne fonctionnera que pour le propriétaire du bot. (Ne fonctionnera que pour le propriétaire du bot.)
~~**=========================================**~~
**Mini-Games**
`{0}joke`
Postes une blague aléatoire Chuck Norris.
`{0}8ball`
Pose n'importe quelle question à 8-Ball.
`{0}coinflip`
Va retourner une pièce et afficher le résultat.
`{0}roll [# of dice] D[# of sides] Example: !roll 3 D6`
Va lancer les dés spécifiés et poster le résultat.
`{0}slots`
Va poster un résultat de machine à sous.
~~**=========================================**~~
**Random Commandes**
`{0}cat`
Va poster une image de chat aléatoire ou gif.
`{0}catfact (ACTUELLEMENT HORS DE COMMANDE INDISPONIBLE)`
Va poster un fait de chat au hasard.
`{0}catgif`
Va poster un gif de chat aléatoire.
`{0}dog`
Va poster une image de chien aléatoire.
`{0}rabbit`
Va poster une image de lapin aléatoire.
`{0}face`
Poste un visage random depuis une DB de +270 visages
~~**=========================================**~~
**Jeux**
`{0}hots [hotslogs player ID]` - Example: !hots 3141592
Publiera le MMR du joueur pour le match rapide et la ligue des héros.
`{0}gwent [Nom de la Carte]` - Example: !gwent Geralt
Va poster la description de la carte et l'image de la carte gwent. A une longueur de recherche maximale de 10 caractères.'''.format(config.COMMANDPREFIX)
|
flexible
|
{
"blob_id": "f7283750923e1e430ff1f648878bbb9a0c73d2c4",
"index": 7880,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nhelpMessage = (\n \"\"\"\n**Vocal / Musique**\n\n`{0}join`\nVa rejoindre le salon vocale dans laquelle vous êtes.\n\n`{0}leave`\nVa partir du salon vocale dans laquelle vous êtes.\n\n`{0}play [YouTube Url]` *ou* `{0}play [musique ou video à rechercher]`\nCommencera à jouer l'audio de la vidéo / chanson fournie.\n\n`{0}pause`\nMettra en pause le flux audio actuel.\n\n`{0}resume`\nVa reprendre le flux audio actuel.\n\n`{0}stop`\nArrêter et terminer le flux audio.\n\n~~**=========================================**~~\n\n**Administrateur**\n\n`{0}invite`\nEnvoie un message personnel avec le lien d'invitation du bot. (Ne fonctionnera que pour le propriétaire du bot.)\n\n`{0}shutdown`\nVa faire la déconnexion et l'arrêt du bot. (Ne fonctionnera que pour le propriétaire du bot.)\n\n`{0}status [status here]`\nDéfinira le statut de jeu du bot. Ne fonctionnera que pour le propriétaire du bot. (Ne fonctionnera que pour le propriétaire du bot.)\n\n~~**=========================================**~~\n\n**Mini-Games**\n\n`{0}joke`\nPostes une blague aléatoire Chuck Norris.\n\n`{0}8ball`\nPose n'importe quelle question à 8-Ball.\n\n`{0}coinflip`\nVa retourner une pièce et afficher le résultat.\n\n`{0}roll [# of dice] D[# of sides] Example: !roll 3 D6`\nVa lancer les dés spécifiés et poster le résultat.\n\n`{0}slots`\nVa poster un résultat de machine à sous.\n\n~~**=========================================**~~\n\n**Random Commandes**\n\n`{0}cat`\nVa poster une image de chat aléatoire ou gif.\n\n`{0}catfact (ACTUELLEMENT HORS DE COMMANDE INDISPONIBLE)`\nVa poster un fait de chat au hasard.\n\n`{0}catgif`\nVa poster un gif de chat aléatoire.\n\n`{0}dog`\nVa poster une image de chien aléatoire.\n\n`{0}rabbit`\nVa poster une image de lapin aléatoire.\n\n`{0}face`\nPoste un visage random depuis une DB de +270 visages\n\n~~**=========================================**~~\n\n**Jeux**\n\n`{0}hots [hotslogs player ID]` - Example: !hots 3141592\nPubliera le MMR du joueur pour le match rapide et la ligue des héros.\n\n`{0}gwent [Nom de la Carte]` - Example: !gwent Geralt\nVa poster la description de la carte et l'image de la carte gwent. A une longueur de recherche maximale de 10 caractères.\"\"\"\n .format(config.COMMANDPREFIX))\n",
"step-3": "from settings import *\nhelpMessage = (\n \"\"\"\n**Vocal / Musique**\n\n`{0}join`\nVa rejoindre le salon vocale dans laquelle vous êtes.\n\n`{0}leave`\nVa partir du salon vocale dans laquelle vous êtes.\n\n`{0}play [YouTube Url]` *ou* `{0}play [musique ou video à rechercher]`\nCommencera à jouer l'audio de la vidéo / chanson fournie.\n\n`{0}pause`\nMettra en pause le flux audio actuel.\n\n`{0}resume`\nVa reprendre le flux audio actuel.\n\n`{0}stop`\nArrêter et terminer le flux audio.\n\n~~**=========================================**~~\n\n**Administrateur**\n\n`{0}invite`\nEnvoie un message personnel avec le lien d'invitation du bot. (Ne fonctionnera que pour le propriétaire du bot.)\n\n`{0}shutdown`\nVa faire la déconnexion et l'arrêt du bot. (Ne fonctionnera que pour le propriétaire du bot.)\n\n`{0}status [status here]`\nDéfinira le statut de jeu du bot. Ne fonctionnera que pour le propriétaire du bot. (Ne fonctionnera que pour le propriétaire du bot.)\n\n~~**=========================================**~~\n\n**Mini-Games**\n\n`{0}joke`\nPostes une blague aléatoire Chuck Norris.\n\n`{0}8ball`\nPose n'importe quelle question à 8-Ball.\n\n`{0}coinflip`\nVa retourner une pièce et afficher le résultat.\n\n`{0}roll [# of dice] D[# of sides] Example: !roll 3 D6`\nVa lancer les dés spécifiés et poster le résultat.\n\n`{0}slots`\nVa poster un résultat de machine à sous.\n\n~~**=========================================**~~\n\n**Random Commandes**\n\n`{0}cat`\nVa poster une image de chat aléatoire ou gif.\n\n`{0}catfact (ACTUELLEMENT HORS DE COMMANDE INDISPONIBLE)`\nVa poster un fait de chat au hasard.\n\n`{0}catgif`\nVa poster un gif de chat aléatoire.\n\n`{0}dog`\nVa poster une image de chien aléatoire.\n\n`{0}rabbit`\nVa poster une image de lapin aléatoire.\n\n`{0}face`\nPoste un visage random depuis une DB de +270 visages\n\n~~**=========================================**~~\n\n**Jeux**\n\n`{0}hots [hotslogs player ID]` - Example: !hots 3141592\nPubliera le MMR du joueur pour le match rapide et la ligue des héros.\n\n`{0}gwent [Nom de la Carte]` - Example: !gwent Geralt\nVa poster la description de la carte et l'image de la carte gwent. A une longueur de recherche maximale de 10 caractères.\"\"\"\n .format(config.COMMANDPREFIX))\n",
"step-4": "from settings import *\r\n\r\nhelpMessage = '''\r\n**Vocal / Musique**\r\n\r\n`{0}join`\r\nVa rejoindre le salon vocale dans laquelle vous êtes.\r\n\r\n`{0}leave`\r\nVa partir du salon vocale dans laquelle vous êtes.\r\n\r\n`{0}play [YouTube Url]` *ou* `{0}play [musique ou video à rechercher]`\r\nCommencera à jouer l'audio de la vidéo / chanson fournie.\r\n\r\n`{0}pause`\r\nMettra en pause le flux audio actuel.\r\n\r\n`{0}resume`\r\nVa reprendre le flux audio actuel.\r\n\r\n`{0}stop`\r\nArrêter et terminer le flux audio.\r\n\r\n~~**=========================================**~~\r\n\r\n**Administrateur**\r\n\r\n`{0}invite`\r\nEnvoie un message personnel avec le lien d'invitation du bot. (Ne fonctionnera que pour le propriétaire du bot.)\r\n\r\n`{0}shutdown`\r\nVa faire la déconnexion et l'arrêt du bot. (Ne fonctionnera que pour le propriétaire du bot.)\r\n\r\n`{0}status [status here]`\r\nDéfinira le statut de jeu du bot. Ne fonctionnera que pour le propriétaire du bot. (Ne fonctionnera que pour le propriétaire du bot.)\r\n\r\n~~**=========================================**~~\r\n\r\n**Mini-Games**\r\n\r\n`{0}joke`\r\nPostes une blague aléatoire Chuck Norris.\r\n\r\n`{0}8ball`\r\nPose n'importe quelle question à 8-Ball.\r\n\r\n`{0}coinflip`\r\nVa retourner une pièce et afficher le résultat.\r\n\r\n`{0}roll [# of dice] D[# of sides] Example: !roll 3 D6`\r\nVa lancer les dés spécifiés et poster le résultat.\r\n\r\n`{0}slots`\r\nVa poster un résultat de machine à sous.\r\n\r\n~~**=========================================**~~\r\n\r\n**Random Commandes**\r\n\r\n`{0}cat`\r\nVa poster une image de chat aléatoire ou gif.\r\n\r\n`{0}catfact (ACTUELLEMENT HORS DE COMMANDE INDISPONIBLE)`\r\nVa poster un fait de chat au hasard.\r\n\r\n`{0}catgif`\r\nVa poster un gif de chat aléatoire.\r\n\r\n`{0}dog`\r\nVa poster une image de chien aléatoire.\r\n\r\n`{0}rabbit`\r\nVa poster une image de lapin aléatoire.\r\n\r\n`{0}face`\r\nPoste un visage random depuis une DB de +270 visages\r\n\r\n~~**=========================================**~~\r\n\r\n**Jeux**\r\n\r\n`{0}hots [hotslogs player ID]` - Example: !hots 3141592\r\nPubliera le MMR du joueur pour le match rapide et la ligue des héros.\r\n\r\n`{0}gwent [Nom de la Carte]` - Example: !gwent Geralt\r\nVa poster la description de la carte et l'image de la carte gwent. A une longueur de recherche maximale de 10 caractères.'''.format(config.COMMANDPREFIX)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from pwn import *
hostname = "pwnable.kr"
portnum = 2222
username = "input2"
passwd = "guest"
def main():
args = ["./input"]
print("./input", end="")
for x in range(99):
print(" AA", end="")
args.append("AA")
print(args)
'''
s = ssh(host=hostname,
port=portnum,
user=username,
password=passwd)
p = s.process(args)
p.interactive()
'''
if __name__ == "__main__":
main()
|
normal
|
{
"blob_id": "9184779731d6102498934d77b6d3c0283fc594d9",
"index": 7498,
"step-1": "<mask token>\n\n\ndef main():\n args = ['./input']\n print('./input', end='')\n for x in range(99):\n print(' AA', end='')\n args.append('AA')\n print(args)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n args = ['./input']\n print('./input', end='')\n for x in range(99):\n print(' AA', end='')\n args.append('AA')\n print(args)\n\n\n<mask token>\nif __name__ == '__main__':\n main()\n",
"step-3": "<mask token>\nhostname = 'pwnable.kr'\nportnum = 2222\nusername = 'input2'\npasswd = 'guest'\n\n\ndef main():\n args = ['./input']\n print('./input', end='')\n for x in range(99):\n print(' AA', end='')\n args.append('AA')\n print(args)\n\n\n<mask token>\nif __name__ == '__main__':\n main()\n",
"step-4": "from pwn import *\nhostname = 'pwnable.kr'\nportnum = 2222\nusername = 'input2'\npasswd = 'guest'\n\n\ndef main():\n args = ['./input']\n print('./input', end='')\n for x in range(99):\n print(' AA', end='')\n args.append('AA')\n print(args)\n\n\n<mask token>\nif __name__ == '__main__':\n main()\n",
"step-5": "from pwn import *\n\nhostname = \"pwnable.kr\"\nportnum = 2222\nusername = \"input2\"\npasswd = \"guest\"\n\ndef main():\n\n args = [\"./input\"]\n print(\"./input\", end=\"\")\n for x in range(99):\n print(\" AA\", end=\"\")\n args.append(\"AA\")\n\n print(args)\n\n'''\n s = ssh(host=hostname,\n port=portnum,\n user=username,\n password=passwd)\n p = s.process(args)\n\n p.interactive()\n'''\n\nif __name__ == \"__main__\":\n main()\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
class SpecificationsEventHandler(FileSystemEventHandler):
<|reserved_special_token_0|>
def __init__(self):
self.paused = False
self.banner = (
'============================================================')
def on_modified(self, event):
super(SpecificationsEventHandler, self).on_modified(event)
"""
Description:
Catches the file modified event from the watchdog package and
creates the full path to the file for submission to the test engine
of choice.
Args:
event: Contains the information for the file system event
when modification has occurred
"""
if event.is_directory:
return
if self.paused:
return
if event.src_path.endswith('_specs.py') and not self.paused:
self.paused = True
directory = os.path.abspath(os.path.dirname(event.src_path))
filename = os.path.basename(event.src_path)
file = os.path.join(directory, filename)
print(self.banner, end='\n')
print('testing specifications found in file: {0}'.format(file))
print('')
subprocess.call(['mamba', file], shell=True)
print(self.banner, end='\n')
self.paused = False
return
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class SpecificationsEventHandler(FileSystemEventHandler):
"""Runs the tests inside the specifications class when any specification file is modified
"""
def __init__(self):
self.paused = False
self.banner = (
'============================================================')
def on_modified(self, event):
super(SpecificationsEventHandler, self).on_modified(event)
"""
Description:
Catches the file modified event from the watchdog package and
creates the full path to the file for submission to the test engine
of choice.
Args:
event: Contains the information for the file system event
when modification has occurred
"""
if event.is_directory:
return
if self.paused:
return
if event.src_path.endswith('_specs.py') and not self.paused:
self.paused = True
directory = os.path.abspath(os.path.dirname(event.src_path))
filename = os.path.basename(event.src_path)
file = os.path.join(directory, filename)
print(self.banner, end='\n')
print('testing specifications found in file: {0}'.format(file))
print('')
subprocess.call(['mamba', file], shell=True)
print(self.banner, end='\n')
self.paused = False
return
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class SpecificationsEventHandler(FileSystemEventHandler):
"""Runs the tests inside the specifications class when any specification file is modified
"""
def __init__(self):
self.paused = False
self.banner = (
'============================================================')
def on_modified(self, event):
super(SpecificationsEventHandler, self).on_modified(event)
"""
Description:
Catches the file modified event from the watchdog package and
creates the full path to the file for submission to the test engine
of choice.
Args:
event: Contains the information for the file system event
when modification has occurred
"""
if event.is_directory:
return
if self.paused:
return
if event.src_path.endswith('_specs.py') and not self.paused:
self.paused = True
directory = os.path.abspath(os.path.dirname(event.src_path))
filename = os.path.basename(event.src_path)
file = os.path.join(directory, filename)
print(self.banner, end='\n')
print('testing specifications found in file: {0}'.format(file))
print('')
subprocess.call(['mamba', file], shell=True)
print(self.banner, end='\n')
self.paused = False
return
if __name__ == '__main__':
path = sys.argv[1]
event_handler = SpecificationsEventHandler()
observer = Observer()
observer.schedule(event_handler, path, recursive=True)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import sys
import os.path
import subprocess
import time
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
class SpecificationsEventHandler(FileSystemEventHandler):
"""Runs the tests inside the specifications class when any specification file is modified
"""
def __init__(self):
self.paused = False
self.banner = (
'============================================================')
def on_modified(self, event):
super(SpecificationsEventHandler, self).on_modified(event)
"""
Description:
Catches the file modified event from the watchdog package and
creates the full path to the file for submission to the test engine
of choice.
Args:
event: Contains the information for the file system event
when modification has occurred
"""
if event.is_directory:
return
if self.paused:
return
if event.src_path.endswith('_specs.py') and not self.paused:
self.paused = True
directory = os.path.abspath(os.path.dirname(event.src_path))
filename = os.path.basename(event.src_path)
file = os.path.join(directory, filename)
print(self.banner, end='\n')
print('testing specifications found in file: {0}'.format(file))
print('')
subprocess.call(['mamba', file], shell=True)
print(self.banner, end='\n')
self.paused = False
return
if __name__ == '__main__':
path = sys.argv[1]
event_handler = SpecificationsEventHandler()
observer = Observer()
observer.schedule(event_handler, path, recursive=True)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
<|reserved_special_token_1|>
"""
module : watcher.py
description : Script to automatically watch a directory (via watchdog) for test specifications and run them via mamba (a py.test invocation is kept commented out as an alternative)
"""
import sys
import os.path
import subprocess
import time
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
class SpecificationsEventHandler(FileSystemEventHandler):
"""Runs the tests inside the specifications class when any specification file is modified
"""
def __init__(self):
self.paused = False
self.banner = "============================================================"
def on_modified(self, event):
super(SpecificationsEventHandler, self).on_modified(event)
"""
Description:
Catches the file modified event from the watchdog package and
creates the full path to the file for submission to the test engine
of choice.
Args:
event: Contains the information for the file system event
when modification has occurred
"""
# file modified triggers directory modified as well...
if event.is_directory:
return
if self.paused:
return
if event.src_path.endswith("_specs.py") and not self.paused:
self.paused = True
#filename = os.path.basename(event.src_path)
directory = os.path.abspath(os.path.dirname(event.src_path))
filename = os.path.basename(event.src_path)
file = os.path.join(directory, filename)
print(self.banner, end="\n")
print("testing specifications found in file: {0}".format(file))
print("")
# if using pytest, uncomment the line below
#subprocess.call(['py.test', '-v', file], shell=True)
#using mamba as the test engine:
subprocess.call(['mamba', file], shell=True)
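            # Passing a list together with shell=True behaves as intended on
            # Windows; on POSIX only 'mamba' would reach the shell, so drop
            # shell=True there.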
print(self.banner, end="\n")
self.paused = False
return
if __name__ == "__main__":
path = sys.argv[1]
event_handler = SpecificationsEventHandler()
observer = Observer()
observer.schedule(event_handler, path, recursive=True)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
|
flexible
|
{
"blob_id": "95ea8a21d3ac44c7760179bc4ebf67f0c16e6a19",
"index": 2421,
"step-1": "<mask token>\n\n\nclass SpecificationsEventHandler(FileSystemEventHandler):\n <mask token>\n\n def __init__(self):\n self.paused = False\n self.banner = (\n '============================================================')\n\n def on_modified(self, event):\n super(SpecificationsEventHandler, self).on_modified(event)\n \"\"\"\n\t\t\tDescription:\n\t\t\t\tCatches the file modified event from the watchdog package and \n\t\t\t\tcreates the full path to the file for submission to the test engine \n\t\t\t\tof choice.\n\t\t\t\t\n\t\t\tArgs:\n\t\t\t\tevent: Contains the information for the file system event \n\t\t\t\twhen modification has occurred\n\t\t\"\"\"\n if event.is_directory:\n return\n if self.paused:\n return\n if event.src_path.endswith('_specs.py') and not self.paused:\n self.paused = True\n directory = os.path.abspath(os.path.dirname(event.src_path))\n filename = os.path.basename(event.src_path)\n file = os.path.join(directory, filename)\n print(self.banner, end='\\n')\n print('testing specifications found in file: {0}'.format(file))\n print('')\n subprocess.call(['mamba', file], shell=True)\n print(self.banner, end='\\n')\n self.paused = False\n return\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass SpecificationsEventHandler(FileSystemEventHandler):\n \"\"\"Runs the tests inside the specifications class when any specification file is modified\n\t\"\"\"\n\n def __init__(self):\n self.paused = False\n self.banner = (\n '============================================================')\n\n def on_modified(self, event):\n super(SpecificationsEventHandler, self).on_modified(event)\n \"\"\"\n\t\t\tDescription:\n\t\t\t\tCatches the file modified event from the watchdog package and \n\t\t\t\tcreates the full path to the file for submission to the test engine \n\t\t\t\tof choice.\n\t\t\t\t\n\t\t\tArgs:\n\t\t\t\tevent: Contains the information for the file system event \n\t\t\t\twhen modification has occurred\n\t\t\"\"\"\n if event.is_directory:\n return\n if self.paused:\n return\n if event.src_path.endswith('_specs.py') and not self.paused:\n self.paused = True\n directory = os.path.abspath(os.path.dirname(event.src_path))\n filename = os.path.basename(event.src_path)\n file = os.path.join(directory, filename)\n print(self.banner, end='\\n')\n print('testing specifications found in file: {0}'.format(file))\n print('')\n subprocess.call(['mamba', file], shell=True)\n print(self.banner, end='\\n')\n self.paused = False\n return\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass SpecificationsEventHandler(FileSystemEventHandler):\n \"\"\"Runs the tests inside the specifications class when any specification file is modified\n\t\"\"\"\n\n def __init__(self):\n self.paused = False\n self.banner = (\n '============================================================')\n\n def on_modified(self, event):\n super(SpecificationsEventHandler, self).on_modified(event)\n \"\"\"\n\t\t\tDescription:\n\t\t\t\tCatches the file modified event from the watchdog package and \n\t\t\t\tcreates the full path to the file for submission to the test engine \n\t\t\t\tof choice.\n\t\t\t\t\n\t\t\tArgs:\n\t\t\t\tevent: Contains the information for the file system event \n\t\t\t\twhen modification has occurred\n\t\t\"\"\"\n if event.is_directory:\n return\n if self.paused:\n return\n if event.src_path.endswith('_specs.py') and not self.paused:\n self.paused = True\n directory = os.path.abspath(os.path.dirname(event.src_path))\n filename = os.path.basename(event.src_path)\n file = os.path.join(directory, filename)\n print(self.banner, end='\\n')\n print('testing specifications found in file: {0}'.format(file))\n print('')\n subprocess.call(['mamba', file], shell=True)\n print(self.banner, end='\\n')\n self.paused = False\n return\n\n\nif __name__ == '__main__':\n path = sys.argv[1]\n event_handler = SpecificationsEventHandler()\n observer = Observer()\n observer.schedule(event_handler, path, recursive=True)\n observer.start()\n try:\n while True:\n time.sleep(1)\n except KeyboardInterrupt:\n observer.stop()\n observer.join()\n",
"step-4": "<mask token>\nimport sys\nimport os.path\nimport subprocess\nimport time\nfrom watchdog.observers import Observer\nfrom watchdog.events import FileSystemEventHandler\n\n\nclass SpecificationsEventHandler(FileSystemEventHandler):\n \"\"\"Runs the tests inside the specifications class when any specification file is modified\n\t\"\"\"\n\n def __init__(self):\n self.paused = False\n self.banner = (\n '============================================================')\n\n def on_modified(self, event):\n super(SpecificationsEventHandler, self).on_modified(event)\n \"\"\"\n\t\t\tDescription:\n\t\t\t\tCatches the file modified event from the watchdog package and \n\t\t\t\tcreates the full path to the file for submission to the test engine \n\t\t\t\tof choice.\n\t\t\t\t\n\t\t\tArgs:\n\t\t\t\tevent: Contains the information for the file system event \n\t\t\t\twhen modification has occurred\n\t\t\"\"\"\n if event.is_directory:\n return\n if self.paused:\n return\n if event.src_path.endswith('_specs.py') and not self.paused:\n self.paused = True\n directory = os.path.abspath(os.path.dirname(event.src_path))\n filename = os.path.basename(event.src_path)\n file = os.path.join(directory, filename)\n print(self.banner, end='\\n')\n print('testing specifications found in file: {0}'.format(file))\n print('')\n subprocess.call(['mamba', file], shell=True)\n print(self.banner, end='\\n')\n self.paused = False\n return\n\n\nif __name__ == '__main__':\n path = sys.argv[1]\n event_handler = SpecificationsEventHandler()\n observer = Observer()\n observer.schedule(event_handler, path, recursive=True)\n observer.start()\n try:\n while True:\n time.sleep(1)\n except KeyboardInterrupt:\n observer.stop()\n observer.join()\n",
"step-5": "\"\"\"\nmodule\t\t\t: watcher.py\ndescription\t: Script to automatically watch a directory (via watchdog) for tests and run them via py.test\n\"\"\"\nimport sys\nimport os.path\nimport subprocess\nimport time\nfrom watchdog.observers import Observer\nfrom watchdog.events import FileSystemEventHandler\n\nclass SpecificationsEventHandler(FileSystemEventHandler):\n\t\"\"\"Runs the tests inside the specifications class when any specification file is modified\n\t\"\"\"\n\t\n\tdef __init__(self): \n\t\tself.paused = False\n\t\tself.banner = \"============================================================\"\n \n\tdef on_modified(self, event):\n\t\tsuper(SpecificationsEventHandler, self).on_modified(event)\n\t\t\"\"\"\n\t\t\tDescription:\n\t\t\t\tCatches the file modified event from the watchdog package and \n\t\t\t\tcreates the full path to the file for submission to the test engine \n\t\t\t\tof choice.\n\t\t\t\t\n\t\t\tArgs:\n\t\t\t\tevent: Contains the information for the file system event \n\t\t\t\twhen modification has occurred\n\t\t\"\"\"\n\t\t\n\t\t\n\t\t# file modified triggers directory modified as well...\t\t\n\t\tif event.is_directory:\n\t\t\treturn\n\n\t\tif self.paused: \n\t\t\treturn\n\n\t\tif event.src_path.endswith(\"_specs.py\") and not self.paused:\n\t\t\tself.paused = True\n\t\t\t#filename = os.path.basename(event.src_path)\n\t\t\tdirectory = os.path.abspath(os.path.dirname(event.src_path))\n\t\t\tfilename = os.path.basename(event.src_path)\n\t\t\tfile = os.path.join(directory, filename)\n\n\t\t\tprint(self.banner, end=\"\\n\")\n\t\t\tprint(\"testing specifications found in file: {0}\".format(file))\n\t\t\tprint(\"\")\n\t\t\t\n\t\t\t# if using pytest, uncomment the line below\n\t\t\t#subprocess.call(['py.test', '-v', file], shell=True)\t\n\t\t\t\n\t\t\t#using mamba as the test engine:\n\t\t\tsubprocess.call(['mamba', file], shell=True)\t\n\n\t\t\tprint(self.banner, end=\"\\n\")\n\n\t\t\tself.paused = False\n\t\t\treturn\n\n\nif __name__ == \"__main__\":\n path = sys.argv[1]\n event_handler = SpecificationsEventHandler()\n observer = Observer()\n observer.schedule(event_handler, path, recursive=True)\n observer.start()\n try:\n while True:\n time.sleep(1)\n except KeyboardInterrupt:\n observer.stop()\n observer.join() \n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
batch_size = 64
valid_batch_size = 8
dataset_size = 500
learning_rate = 0.001
weight_decay = 0.0001
epochs = 30
show_frq = 20
negative_size = 10
negative_expand = 1
negative_size_bound = 20
negative_retake = True
load_read_model = False
save_dir = '/cos_person/data/'
torch.backends.cudnn.benchmark = True
dm = DataEmbedding()
dataset = InsuranceAnswerDataset(dataset_size=dataset_size,
negative_size=negative_size, data_type='train')
valid_dataset = InsuranceAnswerDataset(dataset_size=dataset_size,
negative_size=400, data_type='valid')
print(len(dataset))
model = Matcher(embedding_dim=dm.embedding_dim, vocab_size=dm.
embedding_size, hidden_dim=150, tagset_size=50, negative_size=
negative_size)
embedding_matrix = torch.Tensor(dm.get_embedding_matrix())
print('before model:' + get_memory_use())
if torch.cuda.is_available():
embedding_matrix = embedding_matrix.cuda()
model = model.cuda()
model.encoder.embedding.weight.data.copy_(embedding_matrix)
print('after model:' + get_memory_use())
train_loader = data.DataLoader(dataset=dataset, batch_size=batch_size,
shuffle=True, drop_last=True)
valid_loader = data.DataLoader(dataset=valid_dataset, batch_size=
valid_batch_size, shuffle=True, drop_last=True)
optimizer = optim.Adam(model.parameters(), lr=learning_rate,
weight_decay=weight_decay, amsgrad=True)
train_accu_list = []
train_loss_list = []
valid_accu_list = []
valid_loss_list = []
trainer = Trainer(model=model, loader=train_loader, optimizer=optimizer,
batch_size=batch_size, data_size=len(train_loader), threshold_decay
=True)
valider = Evaluator(model=model, loader=valid_loader, batch_size=
valid_batch_size)
for epoch in range(1, epochs + 1):
print('before:' + get_memory_use())
print('Epoch {} start...'.format(epoch))
model.reset_negative(dataset.negative_size)
trainer.train(epoch=epoch, show_frq=show_frq, accu_list=
train_accu_list, loss_list=train_loss_list)
print('train after:' + get_memory_use())
model.reset_negative(valid_dataset.negative_size)
valider.evaluate(epoch=epoch, accu_list=valid_accu_list, loss_list=
valid_loss_list)
print('valid after:' + get_memory_use())
torch.save(train_loss_list, save_dir + 'train_loss.pkl')
torch.save(train_accu_list, save_dir + 'train_accu.pkl')
if negative_retake:
if negative_size + negative_expand <= negative_size_bound:
negative_size += negative_expand
del dataset
del train_loader
dataset = InsuranceAnswerDataset(dataset_size=dataset_size,
negative_size=negative_size)
train_loader = data.DataLoader(dataset=dataset, batch_size=
batch_size, shuffle=True, drop_last=True)
trainer.loader = train_loader
if epochs - epoch <= 5:
load_read_model = True
if load_read_model:
if epoch <= 1:
save_checkpoint(save_dir=save_dir + 'check.pkl', model=
model, optimizer=optimizer)
elif valid_accu_list[-1] > valid_accu_list[-2] or valid_accu_list[
-1] == valid_accu_list[-2] and valid_loss_list[-1
] < valid_loss_list[-2]:
save_checkpoint(save_dir=save_dir + 'check.pkl', model=
model, optimizer=optimizer)
else:
checkpoint = load_checkpoint(save_dir + 'check.pkl')
model.load_state_dict(checkpoint['model_state_dict'])
optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
trainer.model = model
trainer.optimizer = optimizer
trainer._lr_decay(0.8)
valider.model = model
else:
torch.save(model, save_dir + 'model.pkl')
torch.save(train_loss_list, save_dir + 'train_loss.pkl')
torch.save(train_accu_list, save_dir + 'train_accu.pkl')
torch.save(valid_loss_list, save_dir + 'valid_loss.pkl')
torch.save(valid_accu_list, save_dir + 'valid_accu.pkl')
torch.save(model, save_dir + 'model.pkl')
test_dataset = InsuranceAnswerDataset(dataset_size=dataset_size,
negative_size=400, data_type='test')
test_loader = data.DataLoader(dataset=test_dataset, batch_size=
valid_batch_size, shuffle=True, drop_last=True)
tester = Evaluator(model=model, loader=test_loader, batch_size=
valid_batch_size)
test_accu_list = []
test_loss_list = []
model.reset_negative(test_dataset.negative_size)
tester.evaluate(epoch=1, accu_list=test_accu_list, loss_list=test_loss_list
)
torch.save(test_loss_list, save_dir + 'test_loss.pkl')
torch.save(test_accu_list, save_dir + 'test_accu.pkl')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
batch_size = 64
valid_batch_size = 8
dataset_size = 500
learning_rate = 0.001
weight_decay = 0.0001
epochs = 30
show_frq = 20
negative_size = 10
negative_expand = 1
negative_size_bound = 20
negative_retake = True
load_read_model = False
save_dir = '/cos_person/data/'
torch.backends.cudnn.benchmark = True
dm = DataEmbedding()
dataset = InsuranceAnswerDataset(dataset_size=dataset_size,
negative_size=negative_size, data_type='train')
valid_dataset = InsuranceAnswerDataset(dataset_size=dataset_size,
negative_size=400, data_type='valid')
print(len(dataset))
model = Matcher(embedding_dim=dm.embedding_dim, vocab_size=dm.
embedding_size, hidden_dim=150, tagset_size=50, negative_size=
negative_size)
embedding_matrix = torch.Tensor(dm.get_embedding_matrix())
print('before model:' + get_memory_use())
if torch.cuda.is_available():
embedding_matrix = embedding_matrix.cuda()
model = model.cuda()
model.encoder.embedding.weight.data.copy_(embedding_matrix)
print('after model:' + get_memory_use())
train_loader = data.DataLoader(dataset=dataset, batch_size=batch_size,
shuffle=True, drop_last=True)
valid_loader = data.DataLoader(dataset=valid_dataset, batch_size=
valid_batch_size, shuffle=True, drop_last=True)
optimizer = optim.Adam(model.parameters(), lr=learning_rate,
weight_decay=weight_decay, amsgrad=True)
train_accu_list = []
train_loss_list = []
valid_accu_list = []
valid_loss_list = []
trainer = Trainer(model=model, loader=train_loader, optimizer=optimizer,
batch_size=batch_size, data_size=len(train_loader), threshold_decay
=True)
valider = Evaluator(model=model, loader=valid_loader, batch_size=
valid_batch_size)
for epoch in range(1, epochs + 1):
print('before:' + get_memory_use())
print('Epoch {} start...'.format(epoch))
model.reset_negative(dataset.negative_size)
trainer.train(epoch=epoch, show_frq=show_frq, accu_list=
train_accu_list, loss_list=train_loss_list)
print('train after:' + get_memory_use())
model.reset_negative(valid_dataset.negative_size)
valider.evaluate(epoch=epoch, accu_list=valid_accu_list, loss_list=
valid_loss_list)
print('valid after:' + get_memory_use())
torch.save(train_loss_list, save_dir + 'train_loss.pkl')
torch.save(train_accu_list, save_dir + 'train_accu.pkl')
if negative_retake:
if negative_size + negative_expand <= negative_size_bound:
negative_size += negative_expand
del dataset
del train_loader
dataset = InsuranceAnswerDataset(dataset_size=dataset_size,
negative_size=negative_size)
train_loader = data.DataLoader(dataset=dataset, batch_size=
batch_size, shuffle=True, drop_last=True)
trainer.loader = train_loader
if epochs - epoch <= 5:
load_read_model = True
if load_read_model:
if epoch <= 1:
save_checkpoint(save_dir=save_dir + 'check.pkl', model=
model, optimizer=optimizer)
elif valid_accu_list[-1] > valid_accu_list[-2] or valid_accu_list[
-1] == valid_accu_list[-2] and valid_loss_list[-1
] < valid_loss_list[-2]:
save_checkpoint(save_dir=save_dir + 'check.pkl', model=
model, optimizer=optimizer)
else:
checkpoint = load_checkpoint(save_dir + 'check.pkl')
model.load_state_dict(checkpoint['model_state_dict'])
optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
trainer.model = model
trainer.optimizer = optimizer
trainer._lr_decay(0.8)
valider.model = model
else:
torch.save(model, save_dir + 'model.pkl')
torch.save(train_loss_list, save_dir + 'train_loss.pkl')
torch.save(train_accu_list, save_dir + 'train_accu.pkl')
torch.save(valid_loss_list, save_dir + 'valid_loss.pkl')
torch.save(valid_accu_list, save_dir + 'valid_accu.pkl')
torch.save(model, save_dir + 'model.pkl')
test_dataset = InsuranceAnswerDataset(dataset_size=dataset_size,
negative_size=400, data_type='test')
test_loader = data.DataLoader(dataset=test_dataset, batch_size=
valid_batch_size, shuffle=True, drop_last=True)
tester = Evaluator(model=model, loader=test_loader, batch_size=
valid_batch_size)
test_accu_list = []
test_loss_list = []
model.reset_negative(test_dataset.negative_size)
tester.evaluate(epoch=1, accu_list=test_accu_list, loss_list=test_loss_list
)
torch.save(test_loss_list, save_dir + 'test_loss.pkl')
torch.save(test_accu_list, save_dir + 'test_accu.pkl')
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
import torch
import torch.optim as optim
import torch.nn as nn
import torch.utils.data as data
from dataset import InsuranceAnswerDataset, DataEmbedding
from model import Matcher
from tools import Trainer, Evaluator
from tools import save_checkpoint, load_checkpoint, get_memory_use
def main():
batch_size = 64
valid_batch_size = 8
dataset_size = 500
learning_rate = 0.001
weight_decay = 0.0001
epochs = 30
show_frq = 20
negative_size = 10
negative_expand = 1
negative_size_bound = 20
negative_retake = True
load_read_model = False
save_dir = '/cos_person/data/'
torch.backends.cudnn.benchmark = True
dm = DataEmbedding()
dataset = InsuranceAnswerDataset(dataset_size=dataset_size,
negative_size=negative_size, data_type='train')
valid_dataset = InsuranceAnswerDataset(dataset_size=dataset_size,
negative_size=400, data_type='valid')
print(len(dataset))
model = Matcher(embedding_dim=dm.embedding_dim, vocab_size=dm.
embedding_size, hidden_dim=150, tagset_size=50, negative_size=
negative_size)
embedding_matrix = torch.Tensor(dm.get_embedding_matrix())
print('before model:' + get_memory_use())
if torch.cuda.is_available():
embedding_matrix = embedding_matrix.cuda()
model = model.cuda()
model.encoder.embedding.weight.data.copy_(embedding_matrix)
print('after model:' + get_memory_use())
train_loader = data.DataLoader(dataset=dataset, batch_size=batch_size,
shuffle=True, drop_last=True)
valid_loader = data.DataLoader(dataset=valid_dataset, batch_size=
valid_batch_size, shuffle=True, drop_last=True)
optimizer = optim.Adam(model.parameters(), lr=learning_rate,
weight_decay=weight_decay, amsgrad=True)
train_accu_list = []
train_loss_list = []
valid_accu_list = []
valid_loss_list = []
trainer = Trainer(model=model, loader=train_loader, optimizer=optimizer,
batch_size=batch_size, data_size=len(train_loader), threshold_decay
=True)
valider = Evaluator(model=model, loader=valid_loader, batch_size=
valid_batch_size)
for epoch in range(1, epochs + 1):
print('before:' + get_memory_use())
print('Epoch {} start...'.format(epoch))
model.reset_negative(dataset.negative_size)
trainer.train(epoch=epoch, show_frq=show_frq, accu_list=
train_accu_list, loss_list=train_loss_list)
print('train after:' + get_memory_use())
model.reset_negative(valid_dataset.negative_size)
valider.evaluate(epoch=epoch, accu_list=valid_accu_list, loss_list=
valid_loss_list)
print('valid after:' + get_memory_use())
torch.save(train_loss_list, save_dir + 'train_loss.pkl')
torch.save(train_accu_list, save_dir + 'train_accu.pkl')
if negative_retake:
if negative_size + negative_expand <= negative_size_bound:
negative_size += negative_expand
del dataset
del train_loader
dataset = InsuranceAnswerDataset(dataset_size=dataset_size,
negative_size=negative_size)
train_loader = data.DataLoader(dataset=dataset, batch_size=
batch_size, shuffle=True, drop_last=True)
trainer.loader = train_loader
if epochs - epoch <= 5:
load_read_model = True
if load_read_model:
if epoch <= 1:
save_checkpoint(save_dir=save_dir + 'check.pkl', model=
model, optimizer=optimizer)
elif valid_accu_list[-1] > valid_accu_list[-2] or valid_accu_list[
-1] == valid_accu_list[-2] and valid_loss_list[-1
] < valid_loss_list[-2]:
save_checkpoint(save_dir=save_dir + 'check.pkl', model=
model, optimizer=optimizer)
else:
checkpoint = load_checkpoint(save_dir + 'check.pkl')
model.load_state_dict(checkpoint['model_state_dict'])
optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
trainer.model = model
trainer.optimizer = optimizer
trainer._lr_decay(0.8)
valider.model = model
else:
torch.save(model, save_dir + 'model.pkl')
torch.save(train_loss_list, save_dir + 'train_loss.pkl')
torch.save(train_accu_list, save_dir + 'train_accu.pkl')
torch.save(valid_loss_list, save_dir + 'valid_loss.pkl')
torch.save(valid_accu_list, save_dir + 'valid_accu.pkl')
torch.save(model, save_dir + 'model.pkl')
test_dataset = InsuranceAnswerDataset(dataset_size=dataset_size,
negative_size=400, data_type='test')
test_loader = data.DataLoader(dataset=test_dataset, batch_size=
valid_batch_size, shuffle=True, drop_last=True)
tester = Evaluator(model=model, loader=test_loader, batch_size=
valid_batch_size)
test_accu_list = []
test_loss_list = []
model.reset_negative(test_dataset.negative_size)
tester.evaluate(epoch=1, accu_list=test_accu_list, loss_list=test_loss_list
)
torch.save(test_loss_list, save_dir + 'test_loss.pkl')
torch.save(test_accu_list, save_dir + 'test_accu.pkl')
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
import torch
import torch.optim as optim
import torch.nn as nn
import torch.utils.data as data
from dataset import InsuranceAnswerDataset, DataEmbedding
from model import Matcher
from tools import Trainer, Evaluator
from tools import save_checkpoint, load_checkpoint, get_memory_use
def main():
batch_size = 64
valid_batch_size = 8
dataset_size = 500
learning_rate = 0.001
weight_decay = 1e-4
epochs = 30
show_frq = 20
negative_size = 10
negative_expand = 1
negative_size_bound = 20
negative_retake = True
load_read_model = False
save_dir = '/cos_person/data/'
torch.backends.cudnn.benchmark = True
dm = DataEmbedding()
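    # Training pairs each question with `negative_size` negatives; validation uses a fixed pool of 400.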
dataset = InsuranceAnswerDataset(dataset_size=dataset_size, negative_size=negative_size, data_type='train')
valid_dataset = InsuranceAnswerDataset(dataset_size=dataset_size, negative_size=400,
data_type='valid')
print(len(dataset))
model = Matcher(embedding_dim=dm.embedding_dim, vocab_size=dm.embedding_size,
hidden_dim=150, tagset_size=50, negative_size=negative_size)
embedding_matrix = torch.Tensor(dm.get_embedding_matrix())
print('before model:' + get_memory_use())
if torch.cuda.is_available():
embedding_matrix = embedding_matrix.cuda()
model = model.cuda()
model.encoder.embedding.weight.data.copy_(embedding_matrix)
print('after model:' + get_memory_use())
train_loader = data.DataLoader(dataset=dataset, batch_size=batch_size, shuffle=True, drop_last=True)
valid_loader = data.DataLoader(dataset=valid_dataset, batch_size=valid_batch_size, shuffle=True, drop_last=True)
optimizer = optim.Adam(model.parameters(), lr=learning_rate, weight_decay=weight_decay, amsgrad=True)
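    # Adam with the AMSGrad correction plus L2-style weight decay.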
train_accu_list = []
train_loss_list = []
valid_accu_list = []
valid_loss_list = []
trainer = Trainer(model=model, loader=train_loader, optimizer=optimizer, batch_size=batch_size,
data_size=len(train_loader), threshold_decay=True)
valider = Evaluator(model=model, loader=valid_loader, batch_size=valid_batch_size)
for epoch in range(1, epochs + 1):
print('before:' + get_memory_use())
print('Epoch {} start...'.format(epoch))
model.reset_negative(dataset.negative_size)
trainer.train(epoch=epoch, show_frq=show_frq, accu_list=train_accu_list, loss_list=train_loss_list)
print('train after:' + get_memory_use())
model.reset_negative(valid_dataset.negative_size)
valider.evaluate(epoch=epoch, accu_list=valid_accu_list, loss_list=valid_loss_list)
print('valid after:' + get_memory_use())
torch.save(train_loss_list, save_dir + 'train_loss.pkl')
torch.save(train_accu_list, save_dir + 'train_accu.pkl')
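        # Optionally rebuild the training set with `negative_expand` extra negatives per sample, capped at negative_size_bound.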
if negative_retake:
if negative_size + negative_expand <= negative_size_bound:
negative_size += negative_expand
del dataset
del train_loader
dataset = InsuranceAnswerDataset(dataset_size=dataset_size, negative_size=negative_size)
train_loader = data.DataLoader(dataset=dataset, batch_size=batch_size, shuffle=True, drop_last=True)
trainer.loader = train_loader
if epochs - epoch <= 5:
load_read_model = True
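        # Checkpointing (always on for the last five epochs): save on validation improvement,
        # otherwise restore the best weights and decay the learning rate by 0.8.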
if load_read_model:
if epoch <= 1:
save_checkpoint(save_dir=save_dir + 'check.pkl', model=model, optimizer=optimizer)
elif valid_accu_list[-1] > valid_accu_list[-2] \
or (valid_accu_list[-1] == valid_accu_list[-2] and valid_loss_list[-1] < valid_loss_list[-2]):
save_checkpoint(save_dir=save_dir + 'check.pkl', model=model, optimizer=optimizer)
else:
checkpoint = load_checkpoint(save_dir + 'check.pkl')
model.load_state_dict(checkpoint['model_state_dict'])
optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
trainer.model = model
trainer.optimizer = optimizer
trainer._lr_decay(0.8)
valider.model = model
else:
torch.save(model, save_dir + 'model.pkl')
torch.save(train_loss_list, save_dir + 'train_loss.pkl')
torch.save(train_accu_list, save_dir + 'train_accu.pkl')
torch.save(valid_loss_list, save_dir + 'valid_loss.pkl')
torch.save(valid_accu_list, save_dir + 'valid_accu.pkl')
torch.save(model, save_dir + 'model.pkl')
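    # Final evaluation on the held-out test split, again with a 400-negative pool.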
test_dataset = InsuranceAnswerDataset(dataset_size=dataset_size, negative_size=400, data_type='test')
test_loader = data.DataLoader(dataset=test_dataset, batch_size=valid_batch_size, shuffle=True, drop_last=True)
tester = Evaluator(model=model, loader=test_loader, batch_size=valid_batch_size)
test_accu_list = []
test_loss_list = []
model.reset_negative(test_dataset.negative_size)
tester.evaluate(epoch=1, accu_list=test_accu_list, loss_list=test_loss_list)
torch.save(test_loss_list, save_dir + 'test_loss.pkl')
torch.save(test_accu_list, save_dir + 'test_accu.pkl')
if __name__ == '__main__':
main()
|
flexible
|
{
"blob_id": "41f2a5ba0d7a726389936c1ff66a5724209ee99c",
"index": 4099,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n batch_size = 64\n valid_batch_size = 8\n dataset_size = 500\n learning_rate = 0.001\n weight_decay = 0.0001\n epochs = 30\n show_frq = 20\n negative_size = 10\n negative_expand = 1\n negative_size_bound = 20\n negative_retake = True\n load_read_model = False\n save_dir = '/cos_person/data/'\n torch.backends.cudnn.benchmark = True\n dm = DataEmbedding()\n dataset = InsuranceAnswerDataset(dataset_size=dataset_size,\n negative_size=negative_size, data_type='train')\n valid_dataset = InsuranceAnswerDataset(dataset_size=dataset_size,\n negative_size=400, data_type='valid')\n print(len(dataset))\n model = Matcher(embedding_dim=dm.embedding_dim, vocab_size=dm.\n embedding_size, hidden_dim=150, tagset_size=50, negative_size=\n negative_size)\n embedding_matrix = torch.Tensor(dm.get_embedding_matrix())\n print('before model:' + get_memory_use())\n if torch.cuda.is_available():\n embedding_matrix = embedding_matrix.cuda()\n model = model.cuda()\n model.encoder.embedding.weight.data.copy_(embedding_matrix)\n print('after model:' + get_memory_use())\n train_loader = data.DataLoader(dataset=dataset, batch_size=batch_size,\n shuffle=True, drop_last=True)\n valid_loader = data.DataLoader(dataset=valid_dataset, batch_size=\n valid_batch_size, shuffle=True, drop_last=True)\n optimizer = optim.Adam(model.parameters(), lr=learning_rate,\n weight_decay=weight_decay, amsgrad=True)\n train_accu_list = []\n train_loss_list = []\n valid_accu_list = []\n valid_loss_list = []\n trainer = Trainer(model=model, loader=train_loader, optimizer=optimizer,\n batch_size=batch_size, data_size=len(train_loader), threshold_decay\n =True)\n valider = Evaluator(model=model, loader=valid_loader, batch_size=\n valid_batch_size)\n for epoch in range(1, epochs + 1):\n print('before:' + get_memory_use())\n print('Epoch {} start...'.format(epoch))\n model.reset_negative(dataset.negative_size)\n trainer.train(epoch=epoch, show_frq=show_frq, accu_list=\n train_accu_list, loss_list=train_loss_list)\n print('train after:' + get_memory_use())\n model.reset_negative(valid_dataset.negative_size)\n valider.evaluate(epoch=epoch, accu_list=valid_accu_list, loss_list=\n valid_loss_list)\n print('valid after:' + get_memory_use())\n torch.save(train_loss_list, save_dir + 'train_loss.pkl')\n torch.save(train_accu_list, save_dir + 'train_accu.pkl')\n if negative_retake:\n if negative_size + negative_expand <= negative_size_bound:\n negative_size += negative_expand\n del dataset\n del train_loader\n dataset = InsuranceAnswerDataset(dataset_size=dataset_size,\n negative_size=negative_size)\n train_loader = data.DataLoader(dataset=dataset, batch_size=\n batch_size, shuffle=True, drop_last=True)\n trainer.loader = train_loader\n if epochs - epoch <= 5:\n load_read_model = True\n if load_read_model:\n if epoch <= 1:\n save_checkpoint(save_dir=save_dir + 'check.pkl', model=\n model, optimizer=optimizer)\n elif valid_accu_list[-1] > valid_accu_list[-2] or valid_accu_list[\n -1] == valid_accu_list[-2] and valid_loss_list[-1\n ] < valid_loss_list[-2]:\n save_checkpoint(save_dir=save_dir + 'check.pkl', model=\n model, optimizer=optimizer)\n else:\n checkpoint = load_checkpoint(save_dir + 'check.pkl')\n model.load_state_dict(checkpoint['model_state_dict'])\n optimizer.load_state_dict(checkpoint['optimizer_state_dict'])\n trainer.model = model\n trainer.optimizer = optimizer\n trainer._lr_decay(0.8)\n valider.model = model\n else:\n torch.save(model, save_dir + 'model.pkl')\n torch.save(train_loss_list, save_dir + 
'train_loss.pkl')\n torch.save(train_accu_list, save_dir + 'train_accu.pkl')\n torch.save(valid_loss_list, save_dir + 'valid_loss.pkl')\n torch.save(valid_accu_list, save_dir + 'valid_accu.pkl')\n torch.save(model, save_dir + 'model.pkl')\n test_dataset = InsuranceAnswerDataset(dataset_size=dataset_size,\n negative_size=400, data_type='test')\n test_loader = data.DataLoader(dataset=test_dataset, batch_size=\n valid_batch_size, shuffle=True, drop_last=True)\n tester = Evaluator(model=model, loader=test_loader, batch_size=\n valid_batch_size)\n test_accu_list = []\n test_loss_list = []\n model.reset_negative(test_dataset.negative_size)\n tester.evaluate(epoch=1, accu_list=test_accu_list, loss_list=test_loss_list\n )\n torch.save(test_loss_list, save_dir + 'test_loss.pkl')\n torch.save(test_accu_list, save_dir + 'test_accu.pkl')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main():\n batch_size = 64\n valid_batch_size = 8\n dataset_size = 500\n learning_rate = 0.001\n weight_decay = 0.0001\n epochs = 30\n show_frq = 20\n negative_size = 10\n negative_expand = 1\n negative_size_bound = 20\n negative_retake = True\n load_read_model = False\n save_dir = '/cos_person/data/'\n torch.backends.cudnn.benchmark = True\n dm = DataEmbedding()\n dataset = InsuranceAnswerDataset(dataset_size=dataset_size,\n negative_size=negative_size, data_type='train')\n valid_dataset = InsuranceAnswerDataset(dataset_size=dataset_size,\n negative_size=400, data_type='valid')\n print(len(dataset))\n model = Matcher(embedding_dim=dm.embedding_dim, vocab_size=dm.\n embedding_size, hidden_dim=150, tagset_size=50, negative_size=\n negative_size)\n embedding_matrix = torch.Tensor(dm.get_embedding_matrix())\n print('before model:' + get_memory_use())\n if torch.cuda.is_available():\n embedding_matrix = embedding_matrix.cuda()\n model = model.cuda()\n model.encoder.embedding.weight.data.copy_(embedding_matrix)\n print('after model:' + get_memory_use())\n train_loader = data.DataLoader(dataset=dataset, batch_size=batch_size,\n shuffle=True, drop_last=True)\n valid_loader = data.DataLoader(dataset=valid_dataset, batch_size=\n valid_batch_size, shuffle=True, drop_last=True)\n optimizer = optim.Adam(model.parameters(), lr=learning_rate,\n weight_decay=weight_decay, amsgrad=True)\n train_accu_list = []\n train_loss_list = []\n valid_accu_list = []\n valid_loss_list = []\n trainer = Trainer(model=model, loader=train_loader, optimizer=optimizer,\n batch_size=batch_size, data_size=len(train_loader), threshold_decay\n =True)\n valider = Evaluator(model=model, loader=valid_loader, batch_size=\n valid_batch_size)\n for epoch in range(1, epochs + 1):\n print('before:' + get_memory_use())\n print('Epoch {} start...'.format(epoch))\n model.reset_negative(dataset.negative_size)\n trainer.train(epoch=epoch, show_frq=show_frq, accu_list=\n train_accu_list, loss_list=train_loss_list)\n print('train after:' + get_memory_use())\n model.reset_negative(valid_dataset.negative_size)\n valider.evaluate(epoch=epoch, accu_list=valid_accu_list, loss_list=\n valid_loss_list)\n print('valid after:' + get_memory_use())\n torch.save(train_loss_list, save_dir + 'train_loss.pkl')\n torch.save(train_accu_list, save_dir + 'train_accu.pkl')\n if negative_retake:\n if negative_size + negative_expand <= negative_size_bound:\n negative_size += negative_expand\n del dataset\n del train_loader\n dataset = InsuranceAnswerDataset(dataset_size=dataset_size,\n negative_size=negative_size)\n train_loader = data.DataLoader(dataset=dataset, batch_size=\n batch_size, shuffle=True, drop_last=True)\n trainer.loader = train_loader\n if epochs - epoch <= 5:\n load_read_model = True\n if load_read_model:\n if epoch <= 1:\n save_checkpoint(save_dir=save_dir + 'check.pkl', model=\n model, optimizer=optimizer)\n elif valid_accu_list[-1] > valid_accu_list[-2] or valid_accu_list[\n -1] == valid_accu_list[-2] and valid_loss_list[-1\n ] < valid_loss_list[-2]:\n save_checkpoint(save_dir=save_dir + 'check.pkl', model=\n model, optimizer=optimizer)\n else:\n checkpoint = load_checkpoint(save_dir + 'check.pkl')\n model.load_state_dict(checkpoint['model_state_dict'])\n optimizer.load_state_dict(checkpoint['optimizer_state_dict'])\n trainer.model = model\n trainer.optimizer = optimizer\n trainer._lr_decay(0.8)\n valider.model = model\n else:\n torch.save(model, save_dir + 'model.pkl')\n torch.save(train_loss_list, save_dir + 
'train_loss.pkl')\n torch.save(train_accu_list, save_dir + 'train_accu.pkl')\n torch.save(valid_loss_list, save_dir + 'valid_loss.pkl')\n torch.save(valid_accu_list, save_dir + 'valid_accu.pkl')\n torch.save(model, save_dir + 'model.pkl')\n test_dataset = InsuranceAnswerDataset(dataset_size=dataset_size,\n negative_size=400, data_type='test')\n test_loader = data.DataLoader(dataset=test_dataset, batch_size=\n valid_batch_size, shuffle=True, drop_last=True)\n tester = Evaluator(model=model, loader=test_loader, batch_size=\n valid_batch_size)\n test_accu_list = []\n test_loss_list = []\n model.reset_negative(test_dataset.negative_size)\n tester.evaluate(epoch=1, accu_list=test_accu_list, loss_list=test_loss_list\n )\n torch.save(test_loss_list, save_dir + 'test_loss.pkl')\n torch.save(test_accu_list, save_dir + 'test_accu.pkl')\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import torch\nimport torch.optim as optim\nimport torch.nn as nn\nimport torch.utils.data as data\nfrom dataset import InsuranceAnswerDataset, DataEmbedding\nfrom model import Matcher\nfrom tools import Trainer, Evaluator\nfrom tools import save_checkpoint, load_checkpoint, get_memory_use\n\n\ndef main():\n batch_size = 64\n valid_batch_size = 8\n dataset_size = 500\n learning_rate = 0.001\n weight_decay = 0.0001\n epochs = 30\n show_frq = 20\n negative_size = 10\n negative_expand = 1\n negative_size_bound = 20\n negative_retake = True\n load_read_model = False\n save_dir = '/cos_person/data/'\n torch.backends.cudnn.benchmark = True\n dm = DataEmbedding()\n dataset = InsuranceAnswerDataset(dataset_size=dataset_size,\n negative_size=negative_size, data_type='train')\n valid_dataset = InsuranceAnswerDataset(dataset_size=dataset_size,\n negative_size=400, data_type='valid')\n print(len(dataset))\n model = Matcher(embedding_dim=dm.embedding_dim, vocab_size=dm.\n embedding_size, hidden_dim=150, tagset_size=50, negative_size=\n negative_size)\n embedding_matrix = torch.Tensor(dm.get_embedding_matrix())\n print('before model:' + get_memory_use())\n if torch.cuda.is_available():\n embedding_matrix = embedding_matrix.cuda()\n model = model.cuda()\n model.encoder.embedding.weight.data.copy_(embedding_matrix)\n print('after model:' + get_memory_use())\n train_loader = data.DataLoader(dataset=dataset, batch_size=batch_size,\n shuffle=True, drop_last=True)\n valid_loader = data.DataLoader(dataset=valid_dataset, batch_size=\n valid_batch_size, shuffle=True, drop_last=True)\n optimizer = optim.Adam(model.parameters(), lr=learning_rate,\n weight_decay=weight_decay, amsgrad=True)\n train_accu_list = []\n train_loss_list = []\n valid_accu_list = []\n valid_loss_list = []\n trainer = Trainer(model=model, loader=train_loader, optimizer=optimizer,\n batch_size=batch_size, data_size=len(train_loader), threshold_decay\n =True)\n valider = Evaluator(model=model, loader=valid_loader, batch_size=\n valid_batch_size)\n for epoch in range(1, epochs + 1):\n print('before:' + get_memory_use())\n print('Epoch {} start...'.format(epoch))\n model.reset_negative(dataset.negative_size)\n trainer.train(epoch=epoch, show_frq=show_frq, accu_list=\n train_accu_list, loss_list=train_loss_list)\n print('train after:' + get_memory_use())\n model.reset_negative(valid_dataset.negative_size)\n valider.evaluate(epoch=epoch, accu_list=valid_accu_list, loss_list=\n valid_loss_list)\n print('valid after:' + get_memory_use())\n torch.save(train_loss_list, save_dir + 'train_loss.pkl')\n torch.save(train_accu_list, save_dir + 'train_accu.pkl')\n if negative_retake:\n if negative_size + negative_expand <= negative_size_bound:\n negative_size += negative_expand\n del dataset\n del train_loader\n dataset = InsuranceAnswerDataset(dataset_size=dataset_size,\n negative_size=negative_size)\n train_loader = data.DataLoader(dataset=dataset, batch_size=\n batch_size, shuffle=True, drop_last=True)\n trainer.loader = train_loader\n if epochs - epoch <= 5:\n load_read_model = True\n if load_read_model:\n if epoch <= 1:\n save_checkpoint(save_dir=save_dir + 'check.pkl', model=\n model, optimizer=optimizer)\n elif valid_accu_list[-1] > valid_accu_list[-2] or valid_accu_list[\n -1] == valid_accu_list[-2] and valid_loss_list[-1\n ] < valid_loss_list[-2]:\n save_checkpoint(save_dir=save_dir + 'check.pkl', model=\n model, optimizer=optimizer)\n else:\n checkpoint = load_checkpoint(save_dir + 'check.pkl')\n 
model.load_state_dict(checkpoint['model_state_dict'])\n optimizer.load_state_dict(checkpoint['optimizer_state_dict'])\n trainer.model = model\n trainer.optimizer = optimizer\n trainer._lr_decay(0.8)\n valider.model = model\n else:\n torch.save(model, save_dir + 'model.pkl')\n torch.save(train_loss_list, save_dir + 'train_loss.pkl')\n torch.save(train_accu_list, save_dir + 'train_accu.pkl')\n torch.save(valid_loss_list, save_dir + 'valid_loss.pkl')\n torch.save(valid_accu_list, save_dir + 'valid_accu.pkl')\n torch.save(model, save_dir + 'model.pkl')\n test_dataset = InsuranceAnswerDataset(dataset_size=dataset_size,\n negative_size=400, data_type='test')\n test_loader = data.DataLoader(dataset=test_dataset, batch_size=\n valid_batch_size, shuffle=True, drop_last=True)\n tester = Evaluator(model=model, loader=test_loader, batch_size=\n valid_batch_size)\n test_accu_list = []\n test_loss_list = []\n model.reset_negative(test_dataset.negative_size)\n tester.evaluate(epoch=1, accu_list=test_accu_list, loss_list=test_loss_list\n )\n torch.save(test_loss_list, save_dir + 'test_loss.pkl')\n torch.save(test_accu_list, save_dir + 'test_accu.pkl')\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "import torch\nimport torch.optim as optim\nimport torch.nn as nn\nimport torch.utils.data as data\n\nfrom dataset import InsuranceAnswerDataset, DataEmbedding\nfrom model import Matcher\nfrom tools import Trainer, Evaluator\nfrom tools import save_checkpoint, load_checkpoint, get_memory_use\n\n\ndef main():\n batch_size = 64\n valid_batch_size = 8\n dataset_size = 500\n learning_rate = 0.001\n weight_decay = 1e-4\n epochs = 30\n show_frq = 20\n negative_size = 10\n negative_expand = 1\n negative_size_bound = 20\n negative_retake = True\n load_read_model = False\n save_dir = '/cos_person/data/'\n torch.backends.cudnn.benchmark = True\n\n dm = DataEmbedding()\n\n dataset = InsuranceAnswerDataset(dataset_size=dataset_size, negative_size=negative_size, data_type='train')\n valid_dataset = InsuranceAnswerDataset(dataset_size=dataset_size, negative_size=400,\n data_type='valid')\n\n print(len(dataset))\n\n model = Matcher(embedding_dim=dm.embedding_dim, vocab_size=dm.embedding_size,\n hidden_dim=150, tagset_size=50, negative_size=negative_size)\n\n embedding_matrix = torch.Tensor(dm.get_embedding_matrix())\n print('before model:' + get_memory_use())\n if torch.cuda.is_available():\n embedding_matrix = embedding_matrix.cuda()\n model = model.cuda()\n model.encoder.embedding.weight.data.copy_(embedding_matrix)\n print('after model:' + get_memory_use())\n\n train_loader = data.DataLoader(dataset=dataset, batch_size=batch_size, shuffle=True, drop_last=True)\n valid_loader = data.DataLoader(dataset=valid_dataset, batch_size=valid_batch_size, shuffle=True, drop_last=True)\n\n optimizer = optim.Adam(model.parameters(), lr=learning_rate, weight_decay=weight_decay, amsgrad=True)\n\n train_accu_list = []\n train_loss_list = []\n valid_accu_list = []\n valid_loss_list = []\n\n trainer = Trainer(model=model, loader=train_loader, optimizer=optimizer, batch_size=batch_size,\n data_size=len(train_loader), threshold_decay=True)\n valider = Evaluator(model=model, loader=valid_loader, batch_size=valid_batch_size)\n for epoch in range(1, epochs + 1):\n print('before:' + get_memory_use())\n print('Epoch {} start...'.format(epoch))\n model.reset_negative(dataset.negative_size)\n trainer.train(epoch=epoch, show_frq=show_frq, accu_list=train_accu_list, loss_list=train_loss_list)\n print('train after:' + get_memory_use())\n model.reset_negative(valid_dataset.negative_size)\n valider.evaluate(epoch=epoch, accu_list=valid_accu_list, loss_list=valid_loss_list)\n print('valid after:' + get_memory_use())\n torch.save(train_loss_list, save_dir + 'train_loss.pkl')\n torch.save(train_accu_list, save_dir + 'train_accu.pkl')\n if negative_retake:\n if negative_size + negative_expand <= negative_size_bound:\n negative_size += negative_expand\n del dataset\n del train_loader\n dataset = InsuranceAnswerDataset(dataset_size=dataset_size, negative_size=negative_size)\n train_loader = data.DataLoader(dataset=dataset, batch_size=batch_size, shuffle=True, drop_last=True)\n trainer.loader = train_loader\n if epochs - epoch <= 5:\n load_read_model = True\n if load_read_model:\n if epoch <= 1:\n save_checkpoint(save_dir=save_dir + 'check.pkl', model=model, optimizer=optimizer)\n elif valid_accu_list[-1] > valid_accu_list[-2] \\\n or (valid_accu_list[-1] == valid_accu_list[-2] and valid_loss_list[-1] < valid_loss_list[-2]):\n save_checkpoint(save_dir=save_dir + 'check.pkl', model=model, optimizer=optimizer)\n else:\n checkpoint = load_checkpoint(save_dir + 'check.pkl')\n model.load_state_dict(checkpoint['model_state_dict'])\n 
optimizer.load_state_dict(checkpoint['optimizer_state_dict'])\n trainer.model = model\n trainer.optimizer = optimizer\n trainer._lr_decay(0.8)\n valider.model = model\n else:\n torch.save(model, save_dir + 'model.pkl')\n\n torch.save(train_loss_list, save_dir + 'train_loss.pkl')\n torch.save(train_accu_list, save_dir + 'train_accu.pkl')\n torch.save(valid_loss_list, save_dir + 'valid_loss.pkl')\n torch.save(valid_accu_list, save_dir + 'valid_accu.pkl')\n torch.save(model, save_dir + 'model.pkl')\n\n test_dataset = InsuranceAnswerDataset(dataset_size=dataset_size, negative_size=400, data_type='test')\n test_loader = data.DataLoader(dataset=test_dataset, batch_size=valid_batch_size, shuffle=True, drop_last=True)\n tester = Evaluator(model=model, loader=test_loader, batch_size=valid_batch_size)\n test_accu_list = []\n test_loss_list = []\n model.reset_negative(test_dataset.negative_size)\n tester.evaluate(epoch=1, accu_list=test_accu_list, loss_list=test_loss_list)\n torch.save(test_loss_list, save_dir + 'test_loss.pkl')\n torch.save(test_accu_list, save_dir + 'test_accu.pkl')\n\n\nif __name__ == '__main__':\n main()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
def raizCubica(numero):
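    # Cube root via pow(); assumes a non-negative input, since a negative base with a
    # fractional exponent yields a complex result in Python 3.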
r = pow(numero,(1/3))
return r
numeros = []
raices = []
for x in range(5):
numeros.insert(x, float(input("Ingrese Numero: ")))
raices.insert(x, round(raizCubica(numeros[x]),3))
print("Numeros: ", numeros)
print("Raices: ", raices)
|
normal
|
{
"blob_id": "180f7f0ade9770c6669680bd13ac8f2fd55cc8c7",
"index": 357,
"step-1": "<mask token>\n",
"step-2": "def raizCubica(numero):\n r = pow(numero, 1 / 3)\n return r\n\n\n<mask token>\n",
"step-3": "def raizCubica(numero):\n r = pow(numero, 1 / 3)\n return r\n\n\n<mask token>\nfor x in range(5):\n numeros.insert(x, float(input('Ingrese Numero: ')))\n raices.insert(x, round(raizCubica(numeros[x]), 3))\nprint('Numeros: ', numeros)\nprint('Raices: ', raices)\n",
"step-4": "def raizCubica(numero):\n r = pow(numero, 1 / 3)\n return r\n\n\nnumeros = []\nraices = []\nfor x in range(5):\n numeros.insert(x, float(input('Ingrese Numero: ')))\n raices.insert(x, round(raizCubica(numeros[x]), 3))\nprint('Numeros: ', numeros)\nprint('Raices: ', raices)\n",
"step-5": "def raizCubica(numero):\n r = pow(numero,(1/3))\n return r\n\nnumeros = []\nraices = []\n\nfor x in range(5):\n numeros.insert(x, float(input(\"Ingrese Numero: \")))\n raices.insert(x, round(raizCubica(numeros[x]),3))\n\nprint(\"Numeros: \", numeros)\nprint(\"Raices: \", raices)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
@click.group()
def main():
"""
An empty click group, required in order to bundle the other commands.
"""
pass
<|reserved_special_token_0|>
@main.command(help=
"""Reads the route list between a source airport and a destination airport and
writes the result in the output folder. Sample usage:
fio routes --from-airport PHX --to-airport OKC --output ./out
"""
)
@click.option('--from-airport', '-a', help=
'The three letter code of the source airport. I.e. PHX')
@click.option('--to-airport', '-b', help=
'The three letter code of the destination airport. I.e. OKC')
@click.option('--output', '-o', help=
'The path used to write the parsed routes.', default='./output')
@click_log.simple_verbosity_option(logger)
def routes(from_airport, to_airport, output):
if not from_airport or not to_airport:
logger.error(
            'Unable to get routes without airport codes. Use fio routes --help')
return
airport = from_airport.upper()
scraper = FlightScraper()
route_path = os.path.join(output, 'single_routes')
logger.info(f'Creating {route_path}')
os.makedirs(route_path, exist_ok=True)
destination_link = scraper.get_fm_link(from_airport, to_airport)
name, routes = scraper.get_flight_foutes(to_airport, destination_link)
write_csv(route_path, f'{from_airport}_{to_airport}', routes)
def write_csv(path, destination, routes):
if not len(routes):
print(f'{destination} has no routes. Nothing to write.')
return
header = ','.join(routes[0])
with open(os.path.join(path, f'{destination}.csv'), 'w') as f:
f.write(header + '\n')
for route in routes:
row = ','.join(v.strip().replace(',', ' ') for v in route.values())
f.write(row + '\n')
def write_json(destination_path, flight, routes):
with open(os.path.join(destination_path, f'{flight}.json'), 'w') as f:
f.write(json.dumps(routes, indent=4))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@click.group()
def main():
"""
An empty click group, required in order to bundle the other commands.
"""
pass
@main.command(help=
"""Reads the entire destination list of of the given airport
returns the name of the airport and url associated with it.
Sample usage: fio destinations --from-airport PHX"""
)
@click.option('--from-airport', '-a', help=
'The three letter code of the source airport. I.e. PHX')
@click_log.simple_verbosity_option(logger)
def destinations(from_airport):
if not from_airport:
logger.error(
            'Unable to get destinations without an airport code. Use fio destinations --help'
)
return
destinations = FlightScraper().get_destinations(from_airport.upper())
logger.info(json.dumps(destinations, indent=4))
@main.command(help=
"""Reads the entire destination list of of the given airport and crawls
each destination to obtain the list of routes. The output files will
stored in the given folder. Sample usage:
fio all-routes --from-airport PHX --output ./out
"""
)
@click.option('--from-airport', '-a', help=
'The three letter code of the source airport. I.e. PHX')
@click.option('--output', '-o', help=
'The path used to write the parsed routes.', default='./output')
@click_log.simple_verbosity_option(logger)
def all_routes(from_airport, output):
if not from_airport:
logger.error(
            'Unable to get all routes without an airport code. Use fio all-routes --help'
)
return
airport = from_airport.upper()
scraper = FlightScraper()
airport_path = os.path.join(output, airport)
logger.info(f'Creating {airport_path}')
os.makedirs(airport_path, exist_ok=True)
for destination, routes in scraper.get_routes(airport):
write_csv(airport_path, destination, routes)
@main.command(help=
"""Reads the route list between a source airport and a destination airport and
writes the result in the output folder. Sample usage:
fio routes --from-airport PHX --to-airport OKC --output ./out
"""
)
@click.option('--from-airport', '-a', help=
'The three letter code of the source airport. I.e. PHX')
@click.option('--to-airport', '-b', help=
'The three letter code of the destination airport. I.e. OKC')
@click.option('--output', '-o', help=
'The path used to write the parsed routes.', default='./output')
@click_log.simple_verbosity_option(logger)
def routes(from_airport, to_airport, output):
if not from_airport or not to_airport:
logger.error(
            'Unable to get routes without airport codes. Use fio routes --help')
return
airport = from_airport.upper()
scraper = FlightScraper()
route_path = os.path.join(output, 'single_routes')
logger.info(f'Creating {route_path}')
os.makedirs(route_path, exist_ok=True)
destination_link = scraper.get_fm_link(from_airport, to_airport)
name, routes = scraper.get_flight_foutes(to_airport, destination_link)
write_csv(route_path, f'{from_airport}_{to_airport}', routes)
def write_csv(path, destination, routes):
if not len(routes):
print(f'{destination} has no routes. Nothing to write.')
return
header = ','.join(routes[0])
with open(os.path.join(path, f'{destination}.csv'), 'w') as f:
f.write(header + '\n')
for route in routes:
row = ','.join(v.strip().replace(',', ' ') for v in route.values())
f.write(row + '\n')
def write_json(destination_path, flight, routes):
with open(os.path.join(destination_path, f'{flight}.json'), 'w') as f:
f.write(json.dumps(routes, indent=4))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
click_log.basic_config(logger)
@click.group()
def main():
"""
An empty click group, required in order to bundle the other commands.
"""
pass
@main.command(help=
"""Reads the entire destination list of of the given airport
returns the name of the airport and url associated with it.
Sample usage: fio destinations --from-airport PHX"""
)
@click.option('--from-airport', '-a', help=
'The three letter code of the source airport. I.e. PHX')
@click_log.simple_verbosity_option(logger)
def destinations(from_airport):
if not from_airport:
logger.error(
            'Unable to get destinations without an airport code. Use fio destinations --help'
)
return
destinations = FlightScraper().get_destinations(from_airport.upper())
logger.info(json.dumps(destinations, indent=4))
@main.command(help=
"""Reads the entire destination list of of the given airport and crawls
each destination to obtain the list of routes. The output files will
stored in the given folder. Sample usage:
fio all-routes --from-airport PHX --output ./out
"""
)
@click.option('--from-airport', '-a', help=
'The three letter code of the source airport. I.e. PHX')
@click.option('--output', '-o', help=
'The path used to write the parsed routes.', default='./output')
@click_log.simple_verbosity_option(logger)
def all_routes(from_airport, output):
if not from_airport:
logger.error(
            'Unable to get all routes without an airport code. Use fio all-routes --help'
)
return
airport = from_airport.upper()
scraper = FlightScraper()
airport_path = os.path.join(output, airport)
logger.info(f'Creating {airport_path}')
os.makedirs(airport_path, exist_ok=True)
for destination, routes in scraper.get_routes(airport):
write_csv(airport_path, destination, routes)
@main.command(help=
"""Reads the route list between a source airport and a destination airport and
writes the result in the output folder. Sample usage:
fio routes --from-airport PHX --to-airport OKC --output ./out
"""
)
@click.option('--from-airport', '-a', help=
'The three letter code of the source airport. I.e. PHX')
@click.option('--to-airport', '-b', help=
'The three letter code of the destination airport. I.e. OKC')
@click.option('--output', '-o', help=
'The path used to write the parsed routes.', default='./output')
@click_log.simple_verbosity_option(logger)
def routes(from_airport, to_airport, output):
if not from_airport or not to_airport:
logger.error(
            'Unable to get routes without airport codes. Use fio routes --help')
return
airport = from_airport.upper()
scraper = FlightScraper()
route_path = os.path.join(output, 'single_routes')
logger.info(f'Creating {route_path}')
os.makedirs(route_path, exist_ok=True)
destination_link = scraper.get_fm_link(from_airport, to_airport)
name, routes = scraper.get_flight_foutes(to_airport, destination_link)
write_csv(route_path, f'{from_airport}_{to_airport}', routes)
def write_csv(path, destination, routes):
if not len(routes):
print(f'{destination} has no routes. Nothing to write.')
return
header = ','.join(routes[0])
with open(os.path.join(path, f'{destination}.csv'), 'w') as f:
f.write(header + '\n')
for route in routes:
row = ','.join(v.strip().replace(',', ' ') for v in route.values())
f.write(row + '\n')
def write_json(destination_path, flight, routes):
with open(os.path.join(destination_path, f'{flight}.json'), 'w') as f:
f.write(json.dumps(routes, indent=4))
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
logger = logging.getLogger(__name__)
click_log.basic_config(logger)
@click.group()
def main():
"""
An empty click group, required in order to bundle the other commands.
"""
pass
@main.command(help=
"""Reads the entire destination list of of the given airport
returns the name of the airport and url associated with it.
Sample usage: fio destinations --from-airport PHX"""
)
@click.option('--from-airport', '-a', help=
'The three letter code of the source airport. I.e. PHX')
@click_log.simple_verbosity_option(logger)
def destinations(from_airport):
if not from_airport:
logger.error(
            'Unable to get destinations without an airport code. Use fio destinations --help'
)
return
destinations = FlightScraper().get_destinations(from_airport.upper())
logger.info(json.dumps(destinations, indent=4))
@main.command(help=
"""Reads the entire destination list of of the given airport and crawls
each destination to obtain the list of routes. The output files will
stored in the given folder. Sample usage:
fio all-routes --from-airport PHX --output ./out
"""
)
@click.option('--from-airport', '-a', help=
'The three letter code of the source airport. I.e. PHX')
@click.option('--output', '-o', help=
'The path used to write the parsed routes.', default='./output')
@click_log.simple_verbosity_option(logger)
def all_routes(from_airport, output):
if not from_airport:
logger.error(
            'Unable to get all routes without an airport code. Use fio all-routes --help'
)
return
airport = from_airport.upper()
scraper = FlightScraper()
airport_path = os.path.join(output, airport)
logger.info(f'Creating {airport_path}')
os.makedirs(airport_path, exist_ok=True)
for destination, routes in scraper.get_routes(airport):
write_csv(airport_path, destination, routes)
@main.command(help=
"""Reads the route list between a source airport and a destination airport and
writes the result in the output folder. Sample usage:
fio routes --from-airport PHX --to-airport OKC --output ./out
"""
)
@click.option('--from-airport', '-a', help=
'The three letter code of the source airport. I.e. PHX')
@click.option('--to-airport', '-b', help=
'The three letter code of the destination airport. I.e. OKC')
@click.option('--output', '-o', help=
'The path used to write the parsed routes.', default='./output')
@click_log.simple_verbosity_option(logger)
def routes(from_airport, to_airport, output):
if not from_airport or not to_airport:
logger.error(
            'Unable to get routes without airport codes. Use fio routes --help')
return
airport = from_airport.upper()
scraper = FlightScraper()
route_path = os.path.join(output, 'single_routes')
logger.info(f'Creating {route_path}')
os.makedirs(route_path, exist_ok=True)
destination_link = scraper.get_fm_link(from_airport, to_airport)
name, routes = scraper.get_flight_foutes(to_airport, destination_link)
write_csv(route_path, f'{from_airport}_{to_airport}', routes)
def write_csv(path, destination, routes):
if not len(routes):
print(f'{destination} has no routes. Nothing to write.')
return
header = ','.join(routes[0])
with open(os.path.join(path, f'{destination}.csv'), 'w') as f:
f.write(header + '\n')
for route in routes:
row = ','.join(v.strip().replace(',', ' ') for v in route.values())
f.write(row + '\n')
def write_json(destination_path, flight, routes):
with open(os.path.join(destination_path, f'{flight}.json'), 'w') as f:
f.write(json.dumps(routes, indent=4))
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
import os
import time
import json
import click
import click_log
import logging
from flightsio.scraper import FlightScraper
logger = logging.getLogger(__name__)
click_log.basic_config(logger)
@click.group()
def main():
"""
An empty click group, required in order to bundle the other commands.
"""
pass
@main.command(help="""Reads the entire destination list of of the given airport
returns the name of the airport and url associated with it.
Sample usage: fio destinations --from-airport PHX""")
@click.option('--from-airport', '-a', help='The three letter code of the source airport. I.e. PHX')
@click_log.simple_verbosity_option(logger)
def destinations(from_airport):
if not from_airport:
        logger.error('Unable to get destinations without an airport code. Use fio destinations --help')
return
destinations = FlightScraper().get_destinations(from_airport.upper())
logger.info(json.dumps(destinations, indent=4))
@main.command(help="""Reads the entire destination list of of the given airport and crawls
    each destination to obtain the list of routes. The output files will be
stored in the given folder. Sample usage:
fio all-routes --from-airport PHX --output ./out
""")
@click.option('--from-airport', '-a', help='The three letter code of the source airport. I.e. PHX')
@click.option('--output', '-o', help='The path used to write the parsed routes.', default='./output')
@click_log.simple_verbosity_option(logger)
def all_routes(from_airport, output):
if not from_airport:
        logger.error('Unable to get all routes without an airport code. Use fio all_routes --help')
return
airport = from_airport.upper()
scraper = FlightScraper()
airport_path = os.path.join(output, airport)
logger.info(f'Creating {airport_path}')
os.makedirs(airport_path, exist_ok=True)
for destination, routes in scraper.get_routes(airport):
write_csv(airport_path, destination, routes)
@main.command(help="""Reads the route list between a source airport and a destination airport and
writes the result in the output folder. Sample usage:
fio routes --from-airport PHX --to-airport OKC --output ./out
""")
@click.option('--from-airport', '-a', help='The three letter code of the source airport. I.e. PHX')
@click.option('--to-airport', '-b', help='The three letter code of the destination airport. I.e. OKC')
@click.option('--output', '-o', help='The path used to write the parsed routes.', default='./output')
@click_log.simple_verbosity_option(logger)
def routes(from_airport, to_airport, output):
if not from_airport or not to_airport:
        logger.error('Unable to get routes without airport codes. Use fio routes --help')
return
airport = from_airport.upper()
scraper = FlightScraper()
route_path = os.path.join(output, 'single_routes')
logger.info(f'Creating {route_path}')
os.makedirs(route_path, exist_ok=True)
destination_link = scraper.get_fm_link(from_airport, to_airport)
name, routes = scraper.get_flight_foutes(to_airport, destination_link)
write_csv(route_path, f'{from_airport}_{to_airport}', routes)
def write_csv(path, destination, routes):
if not len(routes):
print(f'{destination} has no routes. Nothing to write.')
return
header = ','.join(routes[0])
with open(os.path.join(path, f'{destination}.csv'), 'w') as f:
f.write(header + '\n')
for route in routes:
row = ','.join((v.strip().replace(',', ' ') for v in route.values()))
f.write(row + '\n')
def write_json(destination_path, flight, routes):
with open(os.path.join(destination_path, f'{flight}.json'), 'w') as f:
f.write(json.dumps(routes, indent=4))
if __name__ == '__main__':
main()
|
flexible
|
{
"blob_id": "234aad868ea71bbe476b303bcff37221820f1d90",
"index": 4310,
"step-1": "<mask token>\n\n\n@click.group()\ndef main():\n \"\"\"\n An empty click group, required in order to bundle the other commands.\n \"\"\"\n pass\n\n\n<mask token>\n\n\n@main.command(help=\n \"\"\"Reads the route list between a source airport and a destination airport and\n writes the result in the output folder. Sample usage:\n fio routes --from-airport PHX --to-airport OKC --output ./out\n \"\"\"\n )\n@click.option('--from-airport', '-a', help=\n 'The three letter code of the source airport. I.e. PHX')\n@click.option('--to-airport', '-b', help=\n 'The three letter code of the destination airport. I.e. OKC')\n@click.option('--output', '-o', help=\n 'The path used to write the parsed routes.', default='./output')\n@click_log.simple_verbosity_option(logger)\ndef routes(from_airport, to_airport, output):\n if not from_airport or not to_airport:\n logger.error(\n 'Unable to get routes without aiport codes. Use fio routes --help')\n return\n airport = from_airport.upper()\n scraper = FlightScraper()\n route_path = os.path.join(output, 'single_routes')\n logger.info(f'Creating {route_path}')\n os.makedirs(route_path, exist_ok=True)\n destination_link = scraper.get_fm_link(from_airport, to_airport)\n name, routes = scraper.get_flight_foutes(to_airport, destination_link)\n write_csv(route_path, f'{from_airport}_{to_airport}', routes)\n\n\ndef write_csv(path, destination, routes):\n if not len(routes):\n print(f'{destination} has no routes. Nothing to write.')\n return\n header = ','.join(routes[0])\n with open(os.path.join(path, f'{destination}.csv'), 'w') as f:\n f.write(header + '\\n')\n for route in routes:\n row = ','.join(v.strip().replace(',', ' ') for v in route.values())\n f.write(row + '\\n')\n\n\ndef write_json(destination_path, flight, routes):\n with open(os.path.join(destination_path, f'{flight}.json'), 'w') as f:\n f.write(json.dumps(routes, indent=4))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@click.group()\ndef main():\n \"\"\"\n An empty click group, required in order to bundle the other commands.\n \"\"\"\n pass\n\n\n@main.command(help=\n \"\"\"Reads the entire destination list of of the given airport\n returns the name of the airport and url associated with it.\n Sample usage: fio destinations --from-airport PHX\"\"\"\n )\n@click.option('--from-airport', '-a', help=\n 'The three letter code of the source airport. I.e. PHX')\n@click_log.simple_verbosity_option(logger)\ndef destinations(from_airport):\n if not from_airport:\n logger.error(\n 'Unable to get destinations without an aiport code. Use fio destinations --help'\n )\n return\n destinations = FlightScraper().get_destinations(from_airport.upper())\n logger.info(json.dumps(destinations, indent=4))\n\n\n@main.command(help=\n \"\"\"Reads the entire destination list of of the given airport and crawls\n each destination to obtain the list of routes. The output files will\n stored in the given folder. Sample usage:\n fio all-routes --from-airport PHX --output ./out\n \"\"\"\n )\n@click.option('--from-airport', '-a', help=\n 'The three letter code of the source airport. I.e. PHX')\n@click.option('--output', '-o', help=\n 'The path used to write the parsed routes.', default='./output')\n@click_log.simple_verbosity_option(logger)\ndef all_routes(from_airport, output):\n if not from_airport:\n logger.error(\n 'Unable to get all routes without an aiport code. Use fio all_routes --help'\n )\n return\n airport = from_airport.upper()\n scraper = FlightScraper()\n airport_path = os.path.join(output, airport)\n logger.info(f'Creating {airport_path}')\n os.makedirs(airport_path, exist_ok=True)\n for destination, routes in scraper.get_routes(airport):\n write_csv(airport_path, destination, routes)\n\n\n@main.command(help=\n \"\"\"Reads the route list between a source airport and a destination airport and\n writes the result in the output folder. Sample usage:\n fio routes --from-airport PHX --to-airport OKC --output ./out\n \"\"\"\n )\n@click.option('--from-airport', '-a', help=\n 'The three letter code of the source airport. I.e. PHX')\n@click.option('--to-airport', '-b', help=\n 'The three letter code of the destination airport. I.e. OKC')\n@click.option('--output', '-o', help=\n 'The path used to write the parsed routes.', default='./output')\n@click_log.simple_verbosity_option(logger)\ndef routes(from_airport, to_airport, output):\n if not from_airport or not to_airport:\n logger.error(\n 'Unable to get routes without aiport codes. Use fio routes --help')\n return\n airport = from_airport.upper()\n scraper = FlightScraper()\n route_path = os.path.join(output, 'single_routes')\n logger.info(f'Creating {route_path}')\n os.makedirs(route_path, exist_ok=True)\n destination_link = scraper.get_fm_link(from_airport, to_airport)\n name, routes = scraper.get_flight_foutes(to_airport, destination_link)\n write_csv(route_path, f'{from_airport}_{to_airport}', routes)\n\n\ndef write_csv(path, destination, routes):\n if not len(routes):\n print(f'{destination} has no routes. 
Nothing to write.')\n return\n header = ','.join(routes[0])\n with open(os.path.join(path, f'{destination}.csv'), 'w') as f:\n f.write(header + '\\n')\n for route in routes:\n row = ','.join(v.strip().replace(',', ' ') for v in route.values())\n f.write(row + '\\n')\n\n\ndef write_json(destination_path, flight, routes):\n with open(os.path.join(destination_path, f'{flight}.json'), 'w') as f:\n f.write(json.dumps(routes, indent=4))\n\n\n<mask token>\n",
"step-3": "<mask token>\nclick_log.basic_config(logger)\n\n\n@click.group()\ndef main():\n \"\"\"\n An empty click group, required in order to bundle the other commands.\n \"\"\"\n pass\n\n\n@main.command(help=\n \"\"\"Reads the entire destination list of of the given airport\n returns the name of the airport and url associated with it.\n Sample usage: fio destinations --from-airport PHX\"\"\"\n )\n@click.option('--from-airport', '-a', help=\n 'The three letter code of the source airport. I.e. PHX')\n@click_log.simple_verbosity_option(logger)\ndef destinations(from_airport):\n if not from_airport:\n logger.error(\n 'Unable to get destinations without an aiport code. Use fio destinations --help'\n )\n return\n destinations = FlightScraper().get_destinations(from_airport.upper())\n logger.info(json.dumps(destinations, indent=4))\n\n\n@main.command(help=\n \"\"\"Reads the entire destination list of of the given airport and crawls\n each destination to obtain the list of routes. The output files will\n stored in the given folder. Sample usage:\n fio all-routes --from-airport PHX --output ./out\n \"\"\"\n )\n@click.option('--from-airport', '-a', help=\n 'The three letter code of the source airport. I.e. PHX')\n@click.option('--output', '-o', help=\n 'The path used to write the parsed routes.', default='./output')\n@click_log.simple_verbosity_option(logger)\ndef all_routes(from_airport, output):\n if not from_airport:\n logger.error(\n 'Unable to get all routes without an aiport code. Use fio all_routes --help'\n )\n return\n airport = from_airport.upper()\n scraper = FlightScraper()\n airport_path = os.path.join(output, airport)\n logger.info(f'Creating {airport_path}')\n os.makedirs(airport_path, exist_ok=True)\n for destination, routes in scraper.get_routes(airport):\n write_csv(airport_path, destination, routes)\n\n\n@main.command(help=\n \"\"\"Reads the route list between a source airport and a destination airport and\n writes the result in the output folder. Sample usage:\n fio routes --from-airport PHX --to-airport OKC --output ./out\n \"\"\"\n )\n@click.option('--from-airport', '-a', help=\n 'The three letter code of the source airport. I.e. PHX')\n@click.option('--to-airport', '-b', help=\n 'The three letter code of the destination airport. I.e. OKC')\n@click.option('--output', '-o', help=\n 'The path used to write the parsed routes.', default='./output')\n@click_log.simple_verbosity_option(logger)\ndef routes(from_airport, to_airport, output):\n if not from_airport or not to_airport:\n logger.error(\n 'Unable to get routes without aiport codes. Use fio routes --help')\n return\n airport = from_airport.upper()\n scraper = FlightScraper()\n route_path = os.path.join(output, 'single_routes')\n logger.info(f'Creating {route_path}')\n os.makedirs(route_path, exist_ok=True)\n destination_link = scraper.get_fm_link(from_airport, to_airport)\n name, routes = scraper.get_flight_foutes(to_airport, destination_link)\n write_csv(route_path, f'{from_airport}_{to_airport}', routes)\n\n\ndef write_csv(path, destination, routes):\n if not len(routes):\n print(f'{destination} has no routes. 
Nothing to write.')\n return\n header = ','.join(routes[0])\n with open(os.path.join(path, f'{destination}.csv'), 'w') as f:\n f.write(header + '\\n')\n for route in routes:\n row = ','.join(v.strip().replace(',', ' ') for v in route.values())\n f.write(row + '\\n')\n\n\ndef write_json(destination_path, flight, routes):\n with open(os.path.join(destination_path, f'{flight}.json'), 'w') as f:\n f.write(json.dumps(routes, indent=4))\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "<mask token>\nlogger = logging.getLogger(__name__)\nclick_log.basic_config(logger)\n\n\n@click.group()\ndef main():\n \"\"\"\n An empty click group, required in order to bundle the other commands.\n \"\"\"\n pass\n\n\n@main.command(help=\n \"\"\"Reads the entire destination list of of the given airport\n returns the name of the airport and url associated with it.\n Sample usage: fio destinations --from-airport PHX\"\"\"\n )\n@click.option('--from-airport', '-a', help=\n 'The three letter code of the source airport. I.e. PHX')\n@click_log.simple_verbosity_option(logger)\ndef destinations(from_airport):\n if not from_airport:\n logger.error(\n 'Unable to get destinations without an aiport code. Use fio destinations --help'\n )\n return\n destinations = FlightScraper().get_destinations(from_airport.upper())\n logger.info(json.dumps(destinations, indent=4))\n\n\n@main.command(help=\n \"\"\"Reads the entire destination list of of the given airport and crawls\n each destination to obtain the list of routes. The output files will\n stored in the given folder. Sample usage:\n fio all-routes --from-airport PHX --output ./out\n \"\"\"\n )\n@click.option('--from-airport', '-a', help=\n 'The three letter code of the source airport. I.e. PHX')\n@click.option('--output', '-o', help=\n 'The path used to write the parsed routes.', default='./output')\n@click_log.simple_verbosity_option(logger)\ndef all_routes(from_airport, output):\n if not from_airport:\n logger.error(\n 'Unable to get all routes without an aiport code. Use fio all_routes --help'\n )\n return\n airport = from_airport.upper()\n scraper = FlightScraper()\n airport_path = os.path.join(output, airport)\n logger.info(f'Creating {airport_path}')\n os.makedirs(airport_path, exist_ok=True)\n for destination, routes in scraper.get_routes(airport):\n write_csv(airport_path, destination, routes)\n\n\n@main.command(help=\n \"\"\"Reads the route list between a source airport and a destination airport and\n writes the result in the output folder. Sample usage:\n fio routes --from-airport PHX --to-airport OKC --output ./out\n \"\"\"\n )\n@click.option('--from-airport', '-a', help=\n 'The three letter code of the source airport. I.e. PHX')\n@click.option('--to-airport', '-b', help=\n 'The three letter code of the destination airport. I.e. OKC')\n@click.option('--output', '-o', help=\n 'The path used to write the parsed routes.', default='./output')\n@click_log.simple_verbosity_option(logger)\ndef routes(from_airport, to_airport, output):\n if not from_airport or not to_airport:\n logger.error(\n 'Unable to get routes without aiport codes. Use fio routes --help')\n return\n airport = from_airport.upper()\n scraper = FlightScraper()\n route_path = os.path.join(output, 'single_routes')\n logger.info(f'Creating {route_path}')\n os.makedirs(route_path, exist_ok=True)\n destination_link = scraper.get_fm_link(from_airport, to_airport)\n name, routes = scraper.get_flight_foutes(to_airport, destination_link)\n write_csv(route_path, f'{from_airport}_{to_airport}', routes)\n\n\ndef write_csv(path, destination, routes):\n if not len(routes):\n print(f'{destination} has no routes. 
Nothing to write.')\n return\n header = ','.join(routes[0])\n with open(os.path.join(path, f'{destination}.csv'), 'w') as f:\n f.write(header + '\\n')\n for route in routes:\n row = ','.join(v.strip().replace(',', ' ') for v in route.values())\n f.write(row + '\\n')\n\n\ndef write_json(destination_path, flight, routes):\n with open(os.path.join(destination_path, f'{flight}.json'), 'w') as f:\n f.write(json.dumps(routes, indent=4))\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "import os\nimport time\nimport json\nimport click\nimport click_log\nimport logging\n\nfrom flightsio.scraper import FlightScraper\n\nlogger = logging.getLogger(__name__)\nclick_log.basic_config(logger)\n\n\n@click.group()\ndef main():\n \"\"\"\n An empty click group, required in order to bundle the other commands.\n \"\"\"\n pass\n\n\n@main.command(help=\"\"\"Reads the entire destination list of of the given airport\n returns the name of the airport and url associated with it.\n Sample usage: fio destinations --from-airport PHX\"\"\")\n@click.option('--from-airport', '-a', help='The three letter code of the source airport. I.e. PHX')\n@click_log.simple_verbosity_option(logger)\ndef destinations(from_airport):\n\n if not from_airport:\n logger.error('Unable to get destinations without an aiport code. Use fio destinations --help')\n return\n\n destinations = FlightScraper().get_destinations(from_airport.upper())\n logger.info(json.dumps(destinations, indent=4))\n\n\n@main.command(help=\"\"\"Reads the entire destination list of of the given airport and crawls\n each destination to obtain the list of routes. The output files will\n stored in the given folder. Sample usage:\n fio all-routes --from-airport PHX --output ./out\n \"\"\")\n@click.option('--from-airport', '-a', help='The three letter code of the source airport. I.e. PHX')\n@click.option('--output', '-o', help='The path used to write the parsed routes.', default='./output')\n@click_log.simple_verbosity_option(logger)\ndef all_routes(from_airport, output):\n\n if not from_airport:\n logger.error('Unable to get all routes without an aiport code. Use fio all_routes --help')\n return\n\n airport = from_airport.upper()\n\n scraper = FlightScraper()\n airport_path = os.path.join(output, airport)\n logger.info(f'Creating {airport_path}')\n os.makedirs(airport_path, exist_ok=True)\n\n for destination, routes in scraper.get_routes(airport):\n write_csv(airport_path, destination, routes)\n\n\n@main.command(help=\"\"\"Reads the route list between a source airport and a destination airport and\n writes the result in the output folder. Sample usage:\n fio routes --from-airport PHX --to-airport OKC --output ./out\n \"\"\")\n@click.option('--from-airport', '-a', help='The three letter code of the source airport. I.e. PHX')\n@click.option('--to-airport', '-b', help='The three letter code of the destination airport. I.e. OKC')\n@click.option('--output', '-o', help='The path used to write the parsed routes.', default='./output')\n@click_log.simple_verbosity_option(logger)\ndef routes(from_airport, to_airport, output):\n\n if not from_airport or not to_airport:\n logger.error('Unable to get routes without aiport codes. Use fio routes --help')\n return\n\n airport = from_airport.upper()\n\n scraper = FlightScraper()\n route_path = os.path.join(output, 'single_routes')\n logger.info(f'Creating {route_path}')\n os.makedirs(route_path, exist_ok=True)\n\n destination_link = scraper.get_fm_link(from_airport, to_airport)\n name, routes = scraper.get_flight_foutes(to_airport, destination_link)\n\n write_csv(route_path, f'{from_airport}_{to_airport}', routes)\n\n\ndef write_csv(path, destination, routes):\n\n if not len(routes):\n print(f'{destination} has no routes. 
Nothing to write.')\n return\n\n header = ','.join(routes[0])\n with open(os.path.join(path, f'{destination}.csv'), 'w') as f:\n f.write(header + '\\n')\n for route in routes:\n row = ','.join((v.strip().replace(',', ' ') for v in route.values()))\n f.write(row + '\\n')\n\n\ndef write_json(destination_path, flight, routes):\n\n with open(os.path.join(destination_path, f'{flight}.json'), 'w') as f:\n f.write(json.dumps(routes, indent=4))\n\n\nif __name__ == '__main__':\n main()\n",
"step-ids": [
4,
6,
7,
8,
10
]
}
|
[
4,
6,
7,
8,
10
] |
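A quick way to exercise a click group like the one in the record above without a real scraper is click's bundled test runner. This is a minimal sketch: CliRunner is a real click API, but the import path flightsio.cli is an assumption (the record only shows flightsio.scraper).

from click.testing import CliRunner
from flightsio.cli import main  # hypothetical module path for the CLI above

runner = CliRunner()
# Equivalent to running `fio destinations --from-airport PHX` in a shell.
result = runner.invoke(main, ['destinations', '--from-airport', 'PHX'])
print(result.exit_code)  # 0 on success
print(result.output)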
import os, pygame
import sys
from os import path
from random import choice
WIDTH = 1000
HEIGHT = 800
FPS = 60
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
RED = (255, 0, 0)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
YELLOW = (255, 255, 0)
GRAY80 = (204, 204, 204)
GRAY = (26, 26, 26)
screen = pygame.display.set_mode((1000, 800))
game_folder = os.path.dirname(__file__)
img_folder = os.path.join(game_folder, "img")
def draw_text(surf, text, size, x, y):
font_name = pygame.font.match_font('OCR A Extended')
font = pygame.font.Font(font_name, size)
text_surface = font.render(text, True, WHITE)
text_rect = text_surface.get_rect()
text_rect.midtop = (x, y)
surf.blit(text_surface, text_rect)
def button(msg,x,y,w,h,ic,ac,action=None):
mouse = pygame.mouse.get_pos()
click = pygame.mouse.get_pressed()
print(click)
if x+w > mouse[0] > x and y+h > mouse[1] > y:
pygame.draw.rect(screen, ac,(x,y,w,h))
if click[0] == 1 and action != None:
if action == quit:
pygame.quit()
quit()
else:
pygame.draw.rect(screen, ic,(x,y,w,h))
def main():
# Initialise screen
pygame.init()
pygame.mixer.init()
screen = pygame.display.set_mode((1000, 800))
pygame.display.set_caption('Credits')
# Fill background
background = pygame.image.load(os.path.join(img_folder, "STARS1.jpg")).convert_alpha()
clock = pygame.time.Clock()
start_ticks=pygame.time.get_ticks()
screen.blit(background, (0, 0))
pygame.display.flip()
running = True
while running:
for event in pygame.event.get():
if event.type == pygame.QUIT:
                return
screen.blit(background, (0, 0))
pygame.draw.rect(screen, GRAY,(400,650,190,60))
draw_text(screen, "Credits", 60, 500, 100)
draw_text(screen, "Vincent", 30, 500, 250)
draw_text(screen, "Chevery", 30, 500, 330)
draw_text(screen, "Charlie", 30, 500, 410)
draw_text(screen, "Julian", 30, 500, 490)
draw_text(screen, "Sheriyar", 30, 500, 570)
draw_text(screen, "Julian", 30, 500, 650)
mouse = pygame.mouse.get_pos()
if 400+190 > mouse[0] > 400 and 650+60 > mouse[1] > 650:
pygame.draw.rect(screen, GRAY80,(400,650,190,60))
else:
pygame.draw.rect(screen, GRAY,(400,650,190,60))
draw_text(screen, "EXIT", 40, 488, 660)
#screen.blit(arrow, imagerect)
button("EXIT",400,650,190,60,GRAY,GRAY80,quit)
pygame.display.flip()
if __name__ == '__main__':
main()
pygame.quit()
|
normal
|
{
"blob_id": "7301a521586049ebb5e8e49b604cc96e3acc1fe9",
"index": 3512,
"step-1": "<mask token>\n\n\ndef draw_text(surf, text, size, x, y):\n font_name = pygame.font.match_font('OCR A Extended')\n font = pygame.font.Font(font_name, size)\n text_surface = font.render(text, True, WHITE)\n text_rect = text_surface.get_rect()\n text_rect.midtop = x, y\n surf.blit(text_surface, text_rect)\n\n\ndef button(msg, x, y, w, h, ic, ac, action=None):\n mouse = pygame.mouse.get_pos()\n click = pygame.mouse.get_pressed()\n print(click)\n if x + w > mouse[0] > x and y + h > mouse[1] > y:\n pygame.draw.rect(screen, ac, (x, y, w, h))\n if click[0] == 1 and action != None:\n if action == quit:\n pygame.quit()\n quit()\n else:\n pygame.draw.rect(screen, ic, (x, y, w, h))\n\n\ndef main():\n pygame.init()\n pygame.mixer.init()\n screen = pygame.display.set_mode((1000, 800))\n pygame.display.set_caption('Credits')\n background = pygame.image.load(os.path.join(img_folder, 'STARS1.jpg')\n ).convert_alpha()\n clock = pygame.time.Clock()\n start_ticks = pygame.time.get_ticks()\n screen.blit(background, (0, 0))\n pygame.display.flip()\n running = True\n while running:\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n return\n quit()\n screen.blit(background, (0, 0))\n pygame.draw.rect(screen, GRAY, (400, 650, 190, 60))\n draw_text(screen, 'Credits', 60, 500, 100)\n draw_text(screen, 'Vincent', 30, 500, 250)\n draw_text(screen, 'Chevery', 30, 500, 330)\n draw_text(screen, 'Charlie', 30, 500, 410)\n draw_text(screen, 'Julian', 30, 500, 490)\n draw_text(screen, 'Sheriyar', 30, 500, 570)\n draw_text(screen, 'Julian', 30, 500, 650)\n mouse = pygame.mouse.get_pos()\n if 400 + 190 > mouse[0] > 400 and 650 + 60 > mouse[1] > 650:\n pygame.draw.rect(screen, GRAY80, (400, 650, 190, 60))\n else:\n pygame.draw.rect(screen, GRAY, (400, 650, 190, 60))\n draw_text(screen, 'EXIT', 40, 488, 660)\n button('EXIT', 400, 650, 190, 60, GRAY, GRAY80, quit)\n pygame.display.flip()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef draw_text(surf, text, size, x, y):\n font_name = pygame.font.match_font('OCR A Extended')\n font = pygame.font.Font(font_name, size)\n text_surface = font.render(text, True, WHITE)\n text_rect = text_surface.get_rect()\n text_rect.midtop = x, y\n surf.blit(text_surface, text_rect)\n\n\ndef button(msg, x, y, w, h, ic, ac, action=None):\n mouse = pygame.mouse.get_pos()\n click = pygame.mouse.get_pressed()\n print(click)\n if x + w > mouse[0] > x and y + h > mouse[1] > y:\n pygame.draw.rect(screen, ac, (x, y, w, h))\n if click[0] == 1 and action != None:\n if action == quit:\n pygame.quit()\n quit()\n else:\n pygame.draw.rect(screen, ic, (x, y, w, h))\n\n\ndef main():\n pygame.init()\n pygame.mixer.init()\n screen = pygame.display.set_mode((1000, 800))\n pygame.display.set_caption('Credits')\n background = pygame.image.load(os.path.join(img_folder, 'STARS1.jpg')\n ).convert_alpha()\n clock = pygame.time.Clock()\n start_ticks = pygame.time.get_ticks()\n screen.blit(background, (0, 0))\n pygame.display.flip()\n running = True\n while running:\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n return\n quit()\n screen.blit(background, (0, 0))\n pygame.draw.rect(screen, GRAY, (400, 650, 190, 60))\n draw_text(screen, 'Credits', 60, 500, 100)\n draw_text(screen, 'Vincent', 30, 500, 250)\n draw_text(screen, 'Chevery', 30, 500, 330)\n draw_text(screen, 'Charlie', 30, 500, 410)\n draw_text(screen, 'Julian', 30, 500, 490)\n draw_text(screen, 'Sheriyar', 30, 500, 570)\n draw_text(screen, 'Julian', 30, 500, 650)\n mouse = pygame.mouse.get_pos()\n if 400 + 190 > mouse[0] > 400 and 650 + 60 > mouse[1] > 650:\n pygame.draw.rect(screen, GRAY80, (400, 650, 190, 60))\n else:\n pygame.draw.rect(screen, GRAY, (400, 650, 190, 60))\n draw_text(screen, 'EXIT', 40, 488, 660)\n button('EXIT', 400, 650, 190, 60, GRAY, GRAY80, quit)\n pygame.display.flip()\n\n\nif __name__ == '__main__':\n main()\n pygame.quit()\n",
"step-3": "<mask token>\nWIDTH = 1000\nHEIGHT = 800\nFPS = 60\nBLACK = 0, 0, 0\nWHITE = 255, 255, 255\nRED = 255, 0, 0\nGREEN = 0, 255, 0\nBLUE = 0, 0, 255\nYELLOW = 255, 255, 0\nGRAY80 = 204, 204, 204\nGRAY = 26, 26, 26\nscreen = pygame.display.set_mode((1000, 800))\ngame_folder = os.path.dirname(__file__)\nimg_folder = os.path.join(game_folder, 'img')\n\n\ndef draw_text(surf, text, size, x, y):\n font_name = pygame.font.match_font('OCR A Extended')\n font = pygame.font.Font(font_name, size)\n text_surface = font.render(text, True, WHITE)\n text_rect = text_surface.get_rect()\n text_rect.midtop = x, y\n surf.blit(text_surface, text_rect)\n\n\ndef button(msg, x, y, w, h, ic, ac, action=None):\n mouse = pygame.mouse.get_pos()\n click = pygame.mouse.get_pressed()\n print(click)\n if x + w > mouse[0] > x and y + h > mouse[1] > y:\n pygame.draw.rect(screen, ac, (x, y, w, h))\n if click[0] == 1 and action != None:\n if action == quit:\n pygame.quit()\n quit()\n else:\n pygame.draw.rect(screen, ic, (x, y, w, h))\n\n\ndef main():\n pygame.init()\n pygame.mixer.init()\n screen = pygame.display.set_mode((1000, 800))\n pygame.display.set_caption('Credits')\n background = pygame.image.load(os.path.join(img_folder, 'STARS1.jpg')\n ).convert_alpha()\n clock = pygame.time.Clock()\n start_ticks = pygame.time.get_ticks()\n screen.blit(background, (0, 0))\n pygame.display.flip()\n running = True\n while running:\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n return\n quit()\n screen.blit(background, (0, 0))\n pygame.draw.rect(screen, GRAY, (400, 650, 190, 60))\n draw_text(screen, 'Credits', 60, 500, 100)\n draw_text(screen, 'Vincent', 30, 500, 250)\n draw_text(screen, 'Chevery', 30, 500, 330)\n draw_text(screen, 'Charlie', 30, 500, 410)\n draw_text(screen, 'Julian', 30, 500, 490)\n draw_text(screen, 'Sheriyar', 30, 500, 570)\n draw_text(screen, 'Julian', 30, 500, 650)\n mouse = pygame.mouse.get_pos()\n if 400 + 190 > mouse[0] > 400 and 650 + 60 > mouse[1] > 650:\n pygame.draw.rect(screen, GRAY80, (400, 650, 190, 60))\n else:\n pygame.draw.rect(screen, GRAY, (400, 650, 190, 60))\n draw_text(screen, 'EXIT', 40, 488, 660)\n button('EXIT', 400, 650, 190, 60, GRAY, GRAY80, quit)\n pygame.display.flip()\n\n\nif __name__ == '__main__':\n main()\n pygame.quit()\n",
"step-4": "import os, pygame\nimport sys\nfrom os import path\nfrom random import choice\nWIDTH = 1000\nHEIGHT = 800\nFPS = 60\nBLACK = 0, 0, 0\nWHITE = 255, 255, 255\nRED = 255, 0, 0\nGREEN = 0, 255, 0\nBLUE = 0, 0, 255\nYELLOW = 255, 255, 0\nGRAY80 = 204, 204, 204\nGRAY = 26, 26, 26\nscreen = pygame.display.set_mode((1000, 800))\ngame_folder = os.path.dirname(__file__)\nimg_folder = os.path.join(game_folder, 'img')\n\n\ndef draw_text(surf, text, size, x, y):\n font_name = pygame.font.match_font('OCR A Extended')\n font = pygame.font.Font(font_name, size)\n text_surface = font.render(text, True, WHITE)\n text_rect = text_surface.get_rect()\n text_rect.midtop = x, y\n surf.blit(text_surface, text_rect)\n\n\ndef button(msg, x, y, w, h, ic, ac, action=None):\n mouse = pygame.mouse.get_pos()\n click = pygame.mouse.get_pressed()\n print(click)\n if x + w > mouse[0] > x and y + h > mouse[1] > y:\n pygame.draw.rect(screen, ac, (x, y, w, h))\n if click[0] == 1 and action != None:\n if action == quit:\n pygame.quit()\n quit()\n else:\n pygame.draw.rect(screen, ic, (x, y, w, h))\n\n\ndef main():\n pygame.init()\n pygame.mixer.init()\n screen = pygame.display.set_mode((1000, 800))\n pygame.display.set_caption('Credits')\n background = pygame.image.load(os.path.join(img_folder, 'STARS1.jpg')\n ).convert_alpha()\n clock = pygame.time.Clock()\n start_ticks = pygame.time.get_ticks()\n screen.blit(background, (0, 0))\n pygame.display.flip()\n running = True\n while running:\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n return\n quit()\n screen.blit(background, (0, 0))\n pygame.draw.rect(screen, GRAY, (400, 650, 190, 60))\n draw_text(screen, 'Credits', 60, 500, 100)\n draw_text(screen, 'Vincent', 30, 500, 250)\n draw_text(screen, 'Chevery', 30, 500, 330)\n draw_text(screen, 'Charlie', 30, 500, 410)\n draw_text(screen, 'Julian', 30, 500, 490)\n draw_text(screen, 'Sheriyar', 30, 500, 570)\n draw_text(screen, 'Julian', 30, 500, 650)\n mouse = pygame.mouse.get_pos()\n if 400 + 190 > mouse[0] > 400 and 650 + 60 > mouse[1] > 650:\n pygame.draw.rect(screen, GRAY80, (400, 650, 190, 60))\n else:\n pygame.draw.rect(screen, GRAY, (400, 650, 190, 60))\n draw_text(screen, 'EXIT', 40, 488, 660)\n button('EXIT', 400, 650, 190, 60, GRAY, GRAY80, quit)\n pygame.display.flip()\n\n\nif __name__ == '__main__':\n main()\n pygame.quit()\n",
"step-5": "import os, pygame\r\nimport sys\r\nfrom os import path\r\nfrom random import choice\r\n\r\nWIDTH = 1000\r\nHEIGHT = 800\r\nFPS = 60\r\n\r\nBLACK = (0, 0, 0)\r\nWHITE = (255, 255, 255)\r\nRED = (255, 0, 0)\r\nGREEN = (0, 255, 0)\r\nBLUE = (0, 0, 255)\r\nYELLOW = (255, 255, 0)\r\nGRAY80 = (204, 204, 204)\r\nGRAY = (26, 26, 26)\r\n\r\n\r\nscreen = pygame.display.set_mode((1000, 800))\r\ngame_folder = os.path.dirname(__file__)\r\nimg_folder = os.path.join(game_folder, \"img\")\r\n\r\ndef draw_text(surf, text, size, x, y):\r\n font_name = pygame.font.match_font('OCR A Extended')\r\n font = pygame.font.Font(font_name, size)\r\n text_surface = font.render(text, True, WHITE)\r\n text_rect = text_surface.get_rect()\r\n text_rect.midtop = (x, y)\r\n surf.blit(text_surface, text_rect)\r\n\r\ndef button(msg,x,y,w,h,ic,ac,action=None):\r\n mouse = pygame.mouse.get_pos()\r\n click = pygame.mouse.get_pressed()\r\n print(click)\r\n\r\n if x+w > mouse[0] > x and y+h > mouse[1] > y:\r\n pygame.draw.rect(screen, ac,(x,y,w,h))\r\n if click[0] == 1 and action != None:\r\n if action == quit:\r\n pygame.quit()\r\n quit()\r\n else:\r\n pygame.draw.rect(screen, ic,(x,y,w,h))\r\n\r\ndef main():\r\n # Initialise screen\r\n pygame.init()\r\n pygame.mixer.init()\r\n screen = pygame.display.set_mode((1000, 800))\r\n pygame.display.set_caption('Credits')\r\n\r\n # Fill background\r\n background = pygame.image.load(os.path.join(img_folder, \"STARS1.jpg\")).convert_alpha()\r\n clock = pygame.time.Clock()\r\n start_ticks=pygame.time.get_ticks()\r\n screen.blit(background, (0, 0))\r\n pygame.display.flip()\r\n running = True\r\n\r\n while running:\r\n\r\n for event in pygame.event.get():\r\n if event.type == pygame.QUIT:\r\n return\r\n quit()\r\n\r\n screen.blit(background, (0, 0))\r\n pygame.draw.rect(screen, GRAY,(400,650,190,60))\r\n draw_text(screen, \"Credits\", 60, 500, 100)\r\n draw_text(screen, \"Vincent\", 30, 500, 250)\r\n draw_text(screen, \"Chevery\", 30, 500, 330)\r\n draw_text(screen, \"Charlie\", 30, 500, 410)\r\n draw_text(screen, \"Julian\", 30, 500, 490)\r\n draw_text(screen, \"Sheriyar\", 30, 500, 570)\r\n draw_text(screen, \"Julian\", 30, 500, 650)\r\n\r\n mouse = pygame.mouse.get_pos()\r\n\r\n if 400+190 > mouse[0] > 400 and 650+60 > mouse[1] > 650:\r\n pygame.draw.rect(screen, GRAY80,(400,650,190,60))\r\n else:\r\n pygame.draw.rect(screen, GRAY,(400,650,190,60))\r\n \r\n draw_text(screen, \"EXIT\", 40, 488, 660)\r\n #screen.blit(arrow, imagerect)\r\n button(\"EXIT\",400,650,190,60,GRAY,GRAY80,quit)\r\n \r\n pygame.display.flip()\r\n\r\nif __name__ == '__main__': \r\n main()\r\n pygame.quit()\r\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
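The manual x+w > mouse[0] > x and y+h > mouse[1] > y checks in the pygame record above can be expressed with pygame.Rect.collidepoint, the standard hit-test. A minimal sketch, assuming the same 1000x800 window and button geometry:

import pygame

pygame.init()
screen = pygame.display.set_mode((1000, 800))
exit_rect = pygame.Rect(400, 650, 190, 60)
mouse = pygame.mouse.get_pos()
# collidepoint() replaces the hand-written coordinate comparisons.
color = (204, 204, 204) if exit_rect.collidepoint(mouse) else (26, 26, 26)  # GRAY80 / GRAY
pygame.draw.rect(screen, color, exit_rect)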
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print('haha')
<|reserved_special_token_1|>
import cv2
import torch
print('haha')
|
flexible
|
{
"blob_id": "00f8992173321dfa5ac5b125a2e663b159fafb23",
"index": 4267,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('haha')\n",
"step-3": "import cv2\nimport torch\nprint('haha')\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def largestVar(s: str):
freq = {i: (0) for i in range(26)}
for i in range(len(s)):
        freq[ord(s[i]) - ord('a')] += 1
max_var = 0
for a in range(26):
for b in range(26):
left_a = freq[a]
left_b = freq[b]
<|reserved_special_token_1|>
def largestVar(s: str):
freq = {i:0 for i in range(26)}
for i in range(len(s)):
        freq[ord(s[i]) - ord('a')] += 1
max_var = 0
for a in range(26):
for b in range(26):
left_a = freq[a]
left_b = freq[b]
|
flexible
|
{
"blob_id": "4bd2923381cd3ead9a5605363a86f41b3743bf27",
"index": 7223,
"step-1": "<mask token>\n",
"step-2": "def largestVar(s: str):\n freq = {i: (0) for i in range(26)}\n for i in range(len(s)):\n freq[int(chr(i) - 'a')] += 1\n max_var = 0\n for a in range(26):\n for b in range(26):\n left_a = freq[a]\n left_b = freq[b]\n",
"step-3": "\ndef largestVar(s: str):\n freq = {i:0 for i in range(26)}\n for i in range(len(s)):\n freq[(int) (chr(i) - 'a')] += 1\n \n max_var = 0\n for a in range(26):\n for b in range(26):\n left_a = freq[a]\n left_b = freq[b]\n \n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
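The largestVar record stops mid-scan. One plausible completion, assuming the goal is the classic largest-variance-substring problem (maximize count(a) - count(b) over substrings containing both characters), is a Kadane-style pass per ordered character pair:

from itertools import permutations
from math import inf

def largest_variance(s: str) -> int:
    best = 0
    for a, b in permutations(set(s), 2):
        diff = 0            # count(a) - count(b) for the current window
        diff_with_b = -inf  # same, but only for windows containing at least one b
        for ch in s:
            if ch == a:
                diff += 1
                diff_with_b += 1
            elif ch == b:
                diff_with_b = diff - 1    # window now definitely contains a b
                diff = max(diff - 1, 0)   # reset if the balance goes negative
            best = max(best, diff_with_b)
    return best

print(largest_variance('aababbb'))  # 3 ('babbb' has three b's and one a)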
from wtforms import Form, StringField
class SearchForm(Form):
criteria = StringField("Texto a buscar")
|
normal
|
{
"blob_id": "1896f4d5b304915d5cbbb30b0a83854c4a8cc60c",
"index": 7566,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass SearchForm(Form):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass SearchForm(Form):\n criteria = StringField('Texto a buscar')\n",
"step-4": "from wtforms import Form, StringField\n\n\nclass SearchForm(Form):\n criteria = StringField('Texto a buscar')\n",
"step-5": "from wtforms import Form, StringField\n\nclass SearchForm(Form):\n\tcriteria = StringField(\"Texto a buscar\")\n\t\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
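A short usage sketch for the form above: WTForms' Form takes request data as its first argument, and a werkzeug MultiDict stands in for it here (the werkzeug dependency is an assumption about the surrounding web stack).

from werkzeug.datastructures import MultiDict
from wtforms import Form, StringField

class SearchForm(Form):
    criteria = StringField('Texto a buscar')

form = SearchForm(MultiDict([('criteria', 'python')]))
print(form.validate())     # True -- the field has no validators attached
print(form.criteria.data)  # 'python'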
<|reserved_special_token_0|>
class TestDefaultApi(unittest.TestCase):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def tearDown(self):
pass
<|reserved_special_token_0|>
def test_meme_meme_id_delete(self):
"""Test case for meme_meme_id_delete
Delete meme by ID # noqa: E501
"""
pass
def test_meme_meme_id_get(self):
"""Test case for meme_meme_id_get
Get meme by ID # noqa: E501
"""
pass
def test_meme_post(self):
"""Test case for meme_post
Post meme # noqa: E501
"""
pass
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestDefaultApi(unittest.TestCase):
"""DefaultApi unit test stubs"""
def setUp(self):
self.api = swagger_client.api.default_api.DefaultApi()
def tearDown(self):
pass
def test_meme_get(self):
"""Test case for meme_get
Get meme(s) # noqa: E501
"""
pass
def test_meme_meme_id_delete(self):
"""Test case for meme_meme_id_delete
Delete meme by ID # noqa: E501
"""
pass
def test_meme_meme_id_get(self):
"""Test case for meme_meme_id_get
Get meme by ID # noqa: E501
"""
pass
def test_meme_post(self):
"""Test case for meme_post
Post meme # noqa: E501
"""
pass
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestDefaultApi(unittest.TestCase):
"""DefaultApi unit test stubs"""
def setUp(self):
self.api = swagger_client.api.default_api.DefaultApi()
def tearDown(self):
pass
def test_meme_get(self):
"""Test case for meme_get
Get meme(s) # noqa: E501
"""
pass
def test_meme_meme_id_delete(self):
"""Test case for meme_meme_id_delete
Delete meme by ID # noqa: E501
"""
pass
def test_meme_meme_id_get(self):
"""Test case for meme_meme_id_get
Get meme by ID # noqa: E501
"""
pass
def test_meme_post(self):
"""Test case for meme_post
Post meme # noqa: E501
"""
pass
if __name__ == '__main__':
unittest.main()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.api.default_api import DefaultApi
from swagger_client.rest import ApiException
class TestDefaultApi(unittest.TestCase):
"""DefaultApi unit test stubs"""
def setUp(self):
self.api = swagger_client.api.default_api.DefaultApi()
def tearDown(self):
pass
def test_meme_get(self):
"""Test case for meme_get
Get meme(s) # noqa: E501
"""
pass
def test_meme_meme_id_delete(self):
"""Test case for meme_meme_id_delete
Delete meme by ID # noqa: E501
"""
pass
def test_meme_meme_id_get(self):
"""Test case for meme_meme_id_get
Get meme by ID # noqa: E501
"""
pass
def test_meme_post(self):
"""Test case for meme_post
Post meme # noqa: E501
"""
pass
if __name__ == '__main__':
unittest.main()
<|reserved_special_token_1|>
# coding: utf-8
"""
Meme Meister
API to create memes # noqa: E501
OpenAPI spec version: 0.1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.api.default_api import DefaultApi # noqa: E501
from swagger_client.rest import ApiException
class TestDefaultApi(unittest.TestCase):
"""DefaultApi unit test stubs"""
def setUp(self):
self.api = swagger_client.api.default_api.DefaultApi() # noqa: E501
def tearDown(self):
pass
def test_meme_get(self):
"""Test case for meme_get
Get meme(s) # noqa: E501
"""
pass
def test_meme_meme_id_delete(self):
"""Test case for meme_meme_id_delete
Delete meme by ID # noqa: E501
"""
pass
def test_meme_meme_id_get(self):
"""Test case for meme_meme_id_get
Get meme by ID # noqa: E501
"""
pass
def test_meme_post(self):
"""Test case for meme_post
Post meme # noqa: E501
"""
pass
if __name__ == '__main__':
unittest.main()
|
flexible
|
{
"blob_id": "fca46c095972e8190ee9c93f3bddbb2a49363a7f",
"index": 6903,
"step-1": "<mask token>\n\n\nclass TestDefaultApi(unittest.TestCase):\n <mask token>\n <mask token>\n\n def tearDown(self):\n pass\n <mask token>\n\n def test_meme_meme_id_delete(self):\n \"\"\"Test case for meme_meme_id_delete\n\n Delete meme by ID # noqa: E501\n \"\"\"\n pass\n\n def test_meme_meme_id_get(self):\n \"\"\"Test case for meme_meme_id_get\n\n Get meme by ID # noqa: E501\n \"\"\"\n pass\n\n def test_meme_post(self):\n \"\"\"Test case for meme_post\n\n Post meme # noqa: E501\n \"\"\"\n pass\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass TestDefaultApi(unittest.TestCase):\n \"\"\"DefaultApi unit test stubs\"\"\"\n\n def setUp(self):\n self.api = swagger_client.api.default_api.DefaultApi()\n\n def tearDown(self):\n pass\n\n def test_meme_get(self):\n \"\"\"Test case for meme_get\n\n Get meme(s) # noqa: E501\n \"\"\"\n pass\n\n def test_meme_meme_id_delete(self):\n \"\"\"Test case for meme_meme_id_delete\n\n Delete meme by ID # noqa: E501\n \"\"\"\n pass\n\n def test_meme_meme_id_get(self):\n \"\"\"Test case for meme_meme_id_get\n\n Get meme by ID # noqa: E501\n \"\"\"\n pass\n\n def test_meme_post(self):\n \"\"\"Test case for meme_post\n\n Post meme # noqa: E501\n \"\"\"\n pass\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass TestDefaultApi(unittest.TestCase):\n \"\"\"DefaultApi unit test stubs\"\"\"\n\n def setUp(self):\n self.api = swagger_client.api.default_api.DefaultApi()\n\n def tearDown(self):\n pass\n\n def test_meme_get(self):\n \"\"\"Test case for meme_get\n\n Get meme(s) # noqa: E501\n \"\"\"\n pass\n\n def test_meme_meme_id_delete(self):\n \"\"\"Test case for meme_meme_id_delete\n\n Delete meme by ID # noqa: E501\n \"\"\"\n pass\n\n def test_meme_meme_id_get(self):\n \"\"\"Test case for meme_meme_id_get\n\n Get meme by ID # noqa: E501\n \"\"\"\n pass\n\n def test_meme_post(self):\n \"\"\"Test case for meme_post\n\n Post meme # noqa: E501\n \"\"\"\n pass\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-4": "<mask token>\nfrom __future__ import absolute_import\nimport unittest\nimport swagger_client\nfrom swagger_client.api.default_api import DefaultApi\nfrom swagger_client.rest import ApiException\n\n\nclass TestDefaultApi(unittest.TestCase):\n \"\"\"DefaultApi unit test stubs\"\"\"\n\n def setUp(self):\n self.api = swagger_client.api.default_api.DefaultApi()\n\n def tearDown(self):\n pass\n\n def test_meme_get(self):\n \"\"\"Test case for meme_get\n\n Get meme(s) # noqa: E501\n \"\"\"\n pass\n\n def test_meme_meme_id_delete(self):\n \"\"\"Test case for meme_meme_id_delete\n\n Delete meme by ID # noqa: E501\n \"\"\"\n pass\n\n def test_meme_meme_id_get(self):\n \"\"\"Test case for meme_meme_id_get\n\n Get meme by ID # noqa: E501\n \"\"\"\n pass\n\n def test_meme_post(self):\n \"\"\"Test case for meme_post\n\n Post meme # noqa: E501\n \"\"\"\n pass\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-5": "# coding: utf-8\n\n\"\"\"\n Meme Meister\n\n API to create memes # noqa: E501\n\n OpenAPI spec version: 0.1.0\n \n Generated by: https://github.com/swagger-api/swagger-codegen.git\n\"\"\"\n\n\nfrom __future__ import absolute_import\n\nimport unittest\n\nimport swagger_client\nfrom swagger_client.api.default_api import DefaultApi # noqa: E501\nfrom swagger_client.rest import ApiException\n\n\nclass TestDefaultApi(unittest.TestCase):\n \"\"\"DefaultApi unit test stubs\"\"\"\n\n def setUp(self):\n self.api = swagger_client.api.default_api.DefaultApi() # noqa: E501\n\n def tearDown(self):\n pass\n\n def test_meme_get(self):\n \"\"\"Test case for meme_get\n\n Get meme(s) # noqa: E501\n \"\"\"\n pass\n\n def test_meme_meme_id_delete(self):\n \"\"\"Test case for meme_meme_id_delete\n\n Delete meme by ID # noqa: E501\n \"\"\"\n pass\n\n def test_meme_meme_id_get(self):\n \"\"\"Test case for meme_meme_id_get\n\n Get meme by ID # noqa: E501\n \"\"\"\n pass\n\n def test_meme_post(self):\n \"\"\"Test case for meme_post\n\n Post meme # noqa: E501\n \"\"\"\n pass\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-ids": [
5,
8,
9,
10,
11
]
}
|
[
5,
8,
9,
10,
11
] |
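The generated tests in the record above are intentionally empty stubs. A minimal sketch of filling one in, assuming swagger-codegen produced a meme_get method on DefaultApi to match the test name (the method name and return type are assumptions, not confirmed by the record):

from swagger_client.rest import ApiException

def test_meme_get(self):
    """Test case for meme_get"""
    try:
        memes = self.api.meme_get()  # hypothetical generated operation method
    except ApiException as e:
        self.fail(f'meme_get raised ApiException: {e}')
    self.assertIsNotNone(memes)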
import torch
import torch.multiprocessing as mp
import random
class QManeger(object):
def __init__(self, opt, q_trace, q_batch):
self.traces_s = []
self.traces_a = []
self.traces_r = []
self.lock = mp.Lock()
self.q_trace = q_trace
self.q_batch = q_batch
self.opt = opt
self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
def _push_one(self, state, action, reward):
self.traces_s.append(state)
self.traces_a.append(action)
self.traces_r.append(reward)
def listening(self):
while True:
traces = self.q_trace.get(block=True)
for s, a, r in zip(traces[0], traces[1], traces[2]):
self._push_one(s, a, r)
if len(self.traces_s) > self.opt.batch_size:
self.produce_batch()
def produce_batch(self):
batch_size = self.opt.batch_size
res_s, res_a, res_r = self.traces_s[:batch_size], self.traces_a[:batch_size], \
self.traces_r[:batch_size]
# delete
del self.traces_s[:batch_size]
del self.traces_a[:batch_size]
del self.traces_r[:batch_size]
res_s = torch.FloatTensor(res_s).to(self.device)
res_a = torch.LongTensor(res_a).to(self.device)
res_r = torch.FloatTensor(res_r).to(self.device).view(-1, 1)
# stack batch and put
self.q_batch.put((res_s, res_a, res_r))
|
normal
|
{
"blob_id": "b693cc63e2ee4c994ef7b5e44faea99f15a021f6",
"index": 68,
"step-1": "<mask token>\n\n\nclass QManeger(object):\n <mask token>\n <mask token>\n\n def listening(self):\n while True:\n traces = self.q_trace.get(block=True)\n for s, a, r in zip(traces[0], traces[1], traces[2]):\n self._push_one(s, a, r)\n if len(self.traces_s) > self.opt.batch_size:\n self.produce_batch()\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass QManeger(object):\n\n def __init__(self, opt, q_trace, q_batch):\n self.traces_s = []\n self.traces_a = []\n self.traces_r = []\n self.lock = mp.Lock()\n self.q_trace = q_trace\n self.q_batch = q_batch\n self.opt = opt\n self.device = torch.device('cuda' if torch.cuda.is_available() else\n 'cpu')\n <mask token>\n\n def listening(self):\n while True:\n traces = self.q_trace.get(block=True)\n for s, a, r in zip(traces[0], traces[1], traces[2]):\n self._push_one(s, a, r)\n if len(self.traces_s) > self.opt.batch_size:\n self.produce_batch()\n\n def produce_batch(self):\n batch_size = self.opt.batch_size\n res_s, res_a, res_r = self.traces_s[:batch_size], self.traces_a[:\n batch_size], self.traces_r[:batch_size]\n del self.traces_s[:batch_size]\n del self.traces_a[:batch_size]\n del self.traces_r[:batch_size]\n res_s = torch.FloatTensor(res_s).to(self.device)\n res_a = torch.LongTensor(res_a).to(self.device)\n res_r = torch.FloatTensor(res_r).to(self.device).view(-1, 1)\n self.q_batch.put((res_s, res_a, res_r))\n",
"step-3": "<mask token>\n\n\nclass QManeger(object):\n\n def __init__(self, opt, q_trace, q_batch):\n self.traces_s = []\n self.traces_a = []\n self.traces_r = []\n self.lock = mp.Lock()\n self.q_trace = q_trace\n self.q_batch = q_batch\n self.opt = opt\n self.device = torch.device('cuda' if torch.cuda.is_available() else\n 'cpu')\n\n def _push_one(self, state, action, reward):\n self.traces_s.append(state)\n self.traces_a.append(action)\n self.traces_r.append(reward)\n\n def listening(self):\n while True:\n traces = self.q_trace.get(block=True)\n for s, a, r in zip(traces[0], traces[1], traces[2]):\n self._push_one(s, a, r)\n if len(self.traces_s) > self.opt.batch_size:\n self.produce_batch()\n\n def produce_batch(self):\n batch_size = self.opt.batch_size\n res_s, res_a, res_r = self.traces_s[:batch_size], self.traces_a[:\n batch_size], self.traces_r[:batch_size]\n del self.traces_s[:batch_size]\n del self.traces_a[:batch_size]\n del self.traces_r[:batch_size]\n res_s = torch.FloatTensor(res_s).to(self.device)\n res_a = torch.LongTensor(res_a).to(self.device)\n res_r = torch.FloatTensor(res_r).to(self.device).view(-1, 1)\n self.q_batch.put((res_s, res_a, res_r))\n",
"step-4": "import torch\nimport torch.multiprocessing as mp\nimport random\n\n\nclass QManeger(object):\n\n def __init__(self, opt, q_trace, q_batch):\n self.traces_s = []\n self.traces_a = []\n self.traces_r = []\n self.lock = mp.Lock()\n self.q_trace = q_trace\n self.q_batch = q_batch\n self.opt = opt\n self.device = torch.device('cuda' if torch.cuda.is_available() else\n 'cpu')\n\n def _push_one(self, state, action, reward):\n self.traces_s.append(state)\n self.traces_a.append(action)\n self.traces_r.append(reward)\n\n def listening(self):\n while True:\n traces = self.q_trace.get(block=True)\n for s, a, r in zip(traces[0], traces[1], traces[2]):\n self._push_one(s, a, r)\n if len(self.traces_s) > self.opt.batch_size:\n self.produce_batch()\n\n def produce_batch(self):\n batch_size = self.opt.batch_size\n res_s, res_a, res_r = self.traces_s[:batch_size], self.traces_a[:\n batch_size], self.traces_r[:batch_size]\n del self.traces_s[:batch_size]\n del self.traces_a[:batch_size]\n del self.traces_r[:batch_size]\n res_s = torch.FloatTensor(res_s).to(self.device)\n res_a = torch.LongTensor(res_a).to(self.device)\n res_r = torch.FloatTensor(res_r).to(self.device).view(-1, 1)\n self.q_batch.put((res_s, res_a, res_r))\n",
"step-5": "import torch\nimport torch.multiprocessing as mp\nimport random\n\nclass QManeger(object):\n\n def __init__(self, opt, q_trace, q_batch):\n self.traces_s = []\n self.traces_a = []\n self.traces_r = []\n self.lock = mp.Lock()\n\n self.q_trace = q_trace\n self.q_batch = q_batch\n self.opt = opt\n self.device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n\n def _push_one(self, state, action, reward):\n self.traces_s.append(state)\n self.traces_a.append(action)\n self.traces_r.append(reward)\n\n def listening(self):\n while True:\n traces = self.q_trace.get(block=True)\n for s, a, r in zip(traces[0], traces[1], traces[2]):\n self._push_one(s, a, r)\n\n if len(self.traces_s) > self.opt.batch_size:\n self.produce_batch()\n\n def produce_batch(self):\n batch_size = self.opt.batch_size\n\n res_s, res_a, res_r = self.traces_s[:batch_size], self.traces_a[:batch_size], \\\n self.traces_r[:batch_size]\n\n # delete\n del self.traces_s[:batch_size]\n del self.traces_a[:batch_size]\n del self.traces_r[:batch_size]\n\n res_s = torch.FloatTensor(res_s).to(self.device)\n res_a = torch.LongTensor(res_a).to(self.device)\n res_r = torch.FloatTensor(res_r).to(self.device).view(-1, 1)\n\n # stack batch and put\n self.q_batch.put((res_s, res_a, res_r))\n",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
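A wiring sketch for the QManeger above: actors push raw traces onto q_trace, the listener batches them onto q_batch, and a learner consumes tensors. opt only needs a batch_size here; importing QManeger from this record's module is assumed, and the sketch assumes CPU tensors (sharing CUDA tensors over mp queues needs the spawn context).

from types import SimpleNamespace
import torch.multiprocessing as mp

if __name__ == '__main__':
    opt = SimpleNamespace(batch_size=2)
    q_trace, q_batch = mp.Queue(), mp.Queue()
    qm = QManeger(opt, q_trace, q_batch)  # class defined in the record above
    mp.Process(target=qm.listening, daemon=True).start()
    # One fake trace: (states, actions, rewards); three steps of a 2-dim state.
    q_trace.put(([[0.0, 1.0], [1.0, 0.0], [0.5, 0.5]], [0, 1, 0], [1.0, 0.0, 0.5]))
    states, actions, rewards = q_batch.get()  # blocks until a batch is ready
    print(states.shape, actions.shape, rewards.shape)  # (2, 2), (2,), (2, 1)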
<|reserved_special_token_0|>
class BruteForceAttackState(State):
def run(self):
os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16',
'MacOS10'])
addr_val = np.random.choice(['127.0.0.6', '127.0.0.7', '127.0.0.13',
'127.0.0.42'])
for i in range(self.iterations):
timestamp = datetime.now()
log_id = uuid.uuid4()
message = 'Unsuccessful login attempt'
os = os_val
log_type = 'Informational'
host = log_host
log_machine = addr_val
log = (str(timestamp) + '|' + str(log_id) + '|' + message + '|' +
os + '|' + log_type + '|' + host + '|' + log_machine)
print(log)
f = open(self.path, 'a')
f.write(log + '\n')
f.close()
time.sleep(0.2)
class NoAlarmState(State):
def run(self):
for i in range(self.iterations):
os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16',
'MacOS10'])
addr_val = np.random.choice(['127.0.0.6', '127.0.0.7',
'127.0.0.13', '127.0.0.42'])
timestamp = datetime.now()
log_id = uuid.uuid4()
message = 'Unsuccessful login attempt'
os = os_val
log_type = 'Informational'
host = log_host
log_machine = addr_val
log = (str(timestamp) + '|' + str(log_id) + '|' + message + '|' +
os + '|' + log_type + '|' + host + '|' + log_machine)
print(log)
f = open(self.path, 'a')
f.write(log + '\n')
f.close()
time.sleep(1.5)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class State:
def __init__(self, path, iterations):
self.path = path
self.iterations = iterations
def run(self):
assert 0, 'run not implemented'
class BruteForceAttackState(State):
def run(self):
os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16',
'MacOS10'])
addr_val = np.random.choice(['127.0.0.6', '127.0.0.7', '127.0.0.13',
'127.0.0.42'])
for i in range(self.iterations):
timestamp = datetime.now()
log_id = uuid.uuid4()
message = 'Unsuccessful login attempt'
os = os_val
log_type = 'Informational'
host = log_host
log_machine = addr_val
log = (str(timestamp) + '|' + str(log_id) + '|' + message + '|' +
os + '|' + log_type + '|' + host + '|' + log_machine)
print(log)
f = open(self.path, 'a')
f.write(log + '\n')
f.close()
time.sleep(0.2)
class NoAlarmState(State):
def run(self):
for i in range(self.iterations):
os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16',
'MacOS10'])
addr_val = np.random.choice(['127.0.0.6', '127.0.0.7',
'127.0.0.13', '127.0.0.42'])
timestamp = datetime.now()
log_id = uuid.uuid4()
message = 'Unsuccessful login attempt'
os = os_val
log_type = 'Informational'
host = log_host
log_machine = addr_val
log = (str(timestamp) + '|' + str(log_id) + '|' + message + '|' +
os + '|' + log_type + '|' + host + '|' + log_machine)
print(log)
f = open(self.path, 'a')
f.write(log + '\n')
f.close()
time.sleep(1.5)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
log_host = 'agent1'
class State:
def __init__(self, path, iterations):
self.path = path
self.iterations = iterations
def run(self):
assert 0, 'run not implemented'
class BruteForceAttackState(State):
def run(self):
os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16',
'MacOS10'])
addr_val = np.random.choice(['127.0.0.6', '127.0.0.7', '127.0.0.13',
'127.0.0.42'])
for i in range(self.iterations):
timestamp = datetime.now()
log_id = uuid.uuid4()
message = 'Unsuccessful login attempt'
os = os_val
log_type = 'Informational'
host = log_host
log_machine = addr_val
log = (str(timestamp) + '|' + str(log_id) + '|' + message + '|' +
os + '|' + log_type + '|' + host + '|' + log_machine)
print(log)
f = open(self.path, 'a')
f.write(log + '\n')
f.close()
time.sleep(0.2)
class NoAlarmState(State):
def run(self):
for i in range(self.iterations):
os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16',
'MacOS10'])
addr_val = np.random.choice(['127.0.0.6', '127.0.0.7',
'127.0.0.13', '127.0.0.42'])
timestamp = datetime.now()
log_id = uuid.uuid4()
message = 'Unsuccessful login attempt'
os = os_val
log_type = 'Informational'
host = log_host
log_machine = addr_val
log = (str(timestamp) + '|' + str(log_id) + '|' + message + '|' +
os + '|' + log_type + '|' + host + '|' + log_machine)
print(log)
f = open(self.path, 'a')
f.write(log + '\n')
f.close()
time.sleep(1.5)
<|reserved_special_token_1|>
import numpy as np
import time
import uuid
from datetime import datetime
log_host = 'agent1'
class State:
def __init__(self, path, iterations):
self.path = path
self.iterations = iterations
def run(self):
assert 0, 'run not implemented'
class BruteForceAttackState(State):
def run(self):
os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16',
'MacOS10'])
addr_val = np.random.choice(['127.0.0.6', '127.0.0.7', '127.0.0.13',
'127.0.0.42'])
for i in range(self.iterations):
timestamp = datetime.now()
log_id = uuid.uuid4()
message = 'Unsuccessful login attempt'
os = os_val
log_type = 'Informational'
host = log_host
log_machine = addr_val
log = (str(timestamp) + '|' + str(log_id) + '|' + message + '|' +
os + '|' + log_type + '|' + host + '|' + log_machine)
print(log)
f = open(self.path, 'a')
f.write(log + '\n')
f.close()
time.sleep(0.2)
class NoAlarmState(State):
def run(self):
for i in range(self.iterations):
os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16',
'MacOS10'])
addr_val = np.random.choice(['127.0.0.6', '127.0.0.7',
'127.0.0.13', '127.0.0.42'])
timestamp = datetime.now()
log_id = uuid.uuid4()
message = 'Unsuccessful login attempt'
os = os_val
log_type = 'Informational'
host = log_host
log_machine = addr_val
log = (str(timestamp) + '|' + str(log_id) + '|' + message + '|' +
os + '|' + log_type + '|' + host + '|' + log_machine)
print(log)
f = open(self.path, 'a')
f.write(log + '\n')
f.close()
time.sleep(1.5)
<|reserved_special_token_1|>
import numpy as np
import time
import uuid
from datetime import datetime
log_host = "agent1"
class State:
def __init__(self, path, iterations):
self.path = path
self.iterations = iterations
def run(self):
assert 0, "run not implemented"
class BruteForceAttackState(State):
def run(self):
os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16', 'MacOS10'])
addr_val = np.random.choice(['127.0.0.6', '127.0.0.7', '127.0.0.13', '127.0.0.42'])
for i in range(self.iterations):
timestamp = datetime.now()
log_id = uuid.uuid4()
message = "Unsuccessful login attempt"
os = os_val
log_type = "Informational"
host = log_host
log_machine = addr_val
log = str(timestamp)+"|"+str(log_id)+"|"+message+"|"+os+"|"+log_type+"|"+host+"|"+log_machine
print(log)
f = open(self.path, "a")
f.write(log + "\n")
f.close()
time.sleep(0.2)
class NoAlarmState(State):
def run(self):
for i in range(self.iterations):
os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16', 'MacOS10'])
addr_val = np.random.choice(['127.0.0.6', '127.0.0.7', '127.0.0.13', '127.0.0.42'])
timestamp = datetime.now()
log_id = uuid.uuid4()
message = "Unsuccessful login attempt"
os = os_val
log_type = "Informational"
host = log_host
log_machine = addr_val
log = str(timestamp)+"|"+str(log_id)+"|"+message+"|"+os+"|"+log_type+"|"+host+"|"+log_machine
print(log)
f = open(self.path, "a")
f.write(log + "\n")
f.close()
time.sleep(1.5)
|
flexible
|
{
"blob_id": "cf3b4e2c76091f95d24e8a987a63ece46503d6e8",
"index": 3459,
"step-1": "<mask token>\n\n\nclass BruteForceAttackState(State):\n\n def run(self):\n os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16',\n 'MacOS10'])\n addr_val = np.random.choice(['127.0.0.6', '127.0.0.7', '127.0.0.13',\n '127.0.0.42'])\n for i in range(self.iterations):\n timestamp = datetime.now()\n log_id = uuid.uuid4()\n message = 'Unsuccessful login attempt'\n os = os_val\n log_type = 'Informational'\n host = log_host\n log_machine = addr_val\n log = (str(timestamp) + '|' + str(log_id) + '|' + message + '|' +\n os + '|' + log_type + '|' + host + '|' + log_machine)\n print(log)\n f = open(self.path, 'a')\n f.write(log + '\\n')\n f.close()\n time.sleep(0.2)\n\n\nclass NoAlarmState(State):\n\n def run(self):\n for i in range(self.iterations):\n os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16',\n 'MacOS10'])\n addr_val = np.random.choice(['127.0.0.6', '127.0.0.7',\n '127.0.0.13', '127.0.0.42'])\n timestamp = datetime.now()\n log_id = uuid.uuid4()\n message = 'Unsuccessful login attempt'\n os = os_val\n log_type = 'Informational'\n host = log_host\n log_machine = addr_val\n log = (str(timestamp) + '|' + str(log_id) + '|' + message + '|' +\n os + '|' + log_type + '|' + host + '|' + log_machine)\n print(log)\n f = open(self.path, 'a')\n f.write(log + '\\n')\n f.close()\n time.sleep(1.5)\n",
"step-2": "<mask token>\n\n\nclass State:\n\n def __init__(self, path, iterations):\n self.path = path\n self.iterations = iterations\n\n def run(self):\n assert 0, 'run not implemented'\n\n\nclass BruteForceAttackState(State):\n\n def run(self):\n os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16',\n 'MacOS10'])\n addr_val = np.random.choice(['127.0.0.6', '127.0.0.7', '127.0.0.13',\n '127.0.0.42'])\n for i in range(self.iterations):\n timestamp = datetime.now()\n log_id = uuid.uuid4()\n message = 'Unsuccessful login attempt'\n os = os_val\n log_type = 'Informational'\n host = log_host\n log_machine = addr_val\n log = (str(timestamp) + '|' + str(log_id) + '|' + message + '|' +\n os + '|' + log_type + '|' + host + '|' + log_machine)\n print(log)\n f = open(self.path, 'a')\n f.write(log + '\\n')\n f.close()\n time.sleep(0.2)\n\n\nclass NoAlarmState(State):\n\n def run(self):\n for i in range(self.iterations):\n os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16',\n 'MacOS10'])\n addr_val = np.random.choice(['127.0.0.6', '127.0.0.7',\n '127.0.0.13', '127.0.0.42'])\n timestamp = datetime.now()\n log_id = uuid.uuid4()\n message = 'Unsuccessful login attempt'\n os = os_val\n log_type = 'Informational'\n host = log_host\n log_machine = addr_val\n log = (str(timestamp) + '|' + str(log_id) + '|' + message + '|' +\n os + '|' + log_type + '|' + host + '|' + log_machine)\n print(log)\n f = open(self.path, 'a')\n f.write(log + '\\n')\n f.close()\n time.sleep(1.5)\n",
"step-3": "<mask token>\nlog_host = 'agent1'\n\n\nclass State:\n\n def __init__(self, path, iterations):\n self.path = path\n self.iterations = iterations\n\n def run(self):\n assert 0, 'run not implemented'\n\n\nclass BruteForceAttackState(State):\n\n def run(self):\n os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16',\n 'MacOS10'])\n addr_val = np.random.choice(['127.0.0.6', '127.0.0.7', '127.0.0.13',\n '127.0.0.42'])\n for i in range(self.iterations):\n timestamp = datetime.now()\n log_id = uuid.uuid4()\n message = 'Unsuccessful login attempt'\n os = os_val\n log_type = 'Informational'\n host = log_host\n log_machine = addr_val\n log = (str(timestamp) + '|' + str(log_id) + '|' + message + '|' +\n os + '|' + log_type + '|' + host + '|' + log_machine)\n print(log)\n f = open(self.path, 'a')\n f.write(log + '\\n')\n f.close()\n time.sleep(0.2)\n\n\nclass NoAlarmState(State):\n\n def run(self):\n for i in range(self.iterations):\n os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16',\n 'MacOS10'])\n addr_val = np.random.choice(['127.0.0.6', '127.0.0.7',\n '127.0.0.13', '127.0.0.42'])\n timestamp = datetime.now()\n log_id = uuid.uuid4()\n message = 'Unsuccessful login attempt'\n os = os_val\n log_type = 'Informational'\n host = log_host\n log_machine = addr_val\n log = (str(timestamp) + '|' + str(log_id) + '|' + message + '|' +\n os + '|' + log_type + '|' + host + '|' + log_machine)\n print(log)\n f = open(self.path, 'a')\n f.write(log + '\\n')\n f.close()\n time.sleep(1.5)\n",
"step-4": "import numpy as np\nimport time\nimport uuid\nfrom datetime import datetime\nlog_host = 'agent1'\n\n\nclass State:\n\n def __init__(self, path, iterations):\n self.path = path\n self.iterations = iterations\n\n def run(self):\n assert 0, 'run not implemented'\n\n\nclass BruteForceAttackState(State):\n\n def run(self):\n os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16',\n 'MacOS10'])\n addr_val = np.random.choice(['127.0.0.6', '127.0.0.7', '127.0.0.13',\n '127.0.0.42'])\n for i in range(self.iterations):\n timestamp = datetime.now()\n log_id = uuid.uuid4()\n message = 'Unsuccessful login attempt'\n os = os_val\n log_type = 'Informational'\n host = log_host\n log_machine = addr_val\n log = (str(timestamp) + '|' + str(log_id) + '|' + message + '|' +\n os + '|' + log_type + '|' + host + '|' + log_machine)\n print(log)\n f = open(self.path, 'a')\n f.write(log + '\\n')\n f.close()\n time.sleep(0.2)\n\n\nclass NoAlarmState(State):\n\n def run(self):\n for i in range(self.iterations):\n os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16',\n 'MacOS10'])\n addr_val = np.random.choice(['127.0.0.6', '127.0.0.7',\n '127.0.0.13', '127.0.0.42'])\n timestamp = datetime.now()\n log_id = uuid.uuid4()\n message = 'Unsuccessful login attempt'\n os = os_val\n log_type = 'Informational'\n host = log_host\n log_machine = addr_val\n log = (str(timestamp) + '|' + str(log_id) + '|' + message + '|' +\n os + '|' + log_type + '|' + host + '|' + log_machine)\n print(log)\n f = open(self.path, 'a')\n f.write(log + '\\n')\n f.close()\n time.sleep(1.5)\n",
"step-5": "import numpy as np\nimport time\nimport uuid\nfrom datetime import datetime\n\n\nlog_host = \"agent1\"\n\n\nclass State:\n def __init__(self, path, iterations):\n self.path = path\n self.iterations = iterations\n\n def run(self):\n assert 0, \"run not implemented\"\n\n\nclass BruteForceAttackState(State):\n def run(self):\n os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16', 'MacOS10'])\n addr_val = np.random.choice(['127.0.0.6', '127.0.0.7', '127.0.0.13', '127.0.0.42'])\n for i in range(self.iterations):\n timestamp = datetime.now()\n log_id = uuid.uuid4()\n message = \"Unsuccessful login attempt\"\n os = os_val\n log_type = \"Informational\"\n host = log_host\n log_machine = addr_val\n\n log = str(timestamp)+\"|\"+str(log_id)+\"|\"+message+\"|\"+os+\"|\"+log_type+\"|\"+host+\"|\"+log_machine\n print(log)\n\n f = open(self.path, \"a\")\n f.write(log + \"\\n\")\n f.close()\n time.sleep(0.2)\n\n\nclass NoAlarmState(State):\n def run(self):\n for i in range(self.iterations):\n os_val = np.random.choice(['Windows7', 'Windows10', 'Ubuntu16', 'MacOS10'])\n addr_val = np.random.choice(['127.0.0.6', '127.0.0.7', '127.0.0.13', '127.0.0.42'])\n timestamp = datetime.now()\n log_id = uuid.uuid4()\n message = \"Unsuccessful login attempt\"\n os = os_val\n log_type = \"Informational\"\n host = log_host\n log_machine = addr_val\n\n log = str(timestamp)+\"|\"+str(log_id)+\"|\"+message+\"|\"+os+\"|\"+log_type+\"|\"+host+\"|\"+log_machine\n print(log)\n\n f = open(self.path, \"a\")\n f.write(log + \"\\n\")\n f.close()\n time.sleep(1.5)\n",
"step-ids": [
4,
7,
8,
9,
10
]
}
|
[
4,
7,
8,
9,
10
] |
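The two State subclasses in this record only emit anything when run() is called, and nothing in the record shows how they are sequenced. A minimal driver sketch, assuming the step-5 code is saved as log_states.py (the module name, log path, and iteration counts are all illustrative assumptions, not part of the record):

# Hypothetical driver for the log-generating states above.
from log_states import BruteForceAttackState, NoAlarmState

LOG_PATH = 'agent1.log'

baseline = NoAlarmState(LOG_PATH, iterations=3)          # ~1 event per 1.5 s
attack = BruteForceAttackState(LOG_PATH, iterations=10)  # burst, 1 event per 0.2 s

baseline.run()   # quiet background traffic; host/OS re-sampled per event
attack.run()     # brute-force burst; host/OS sampled once, so one source repeats
baseline.run()   # back to baseline

The only behavioural difference between the two states is where os_val/addr_val are sampled (once per run versus once per event) and the sleep interval, which is what makes the burst stand out in the resulting log.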
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class MarketingemailsConfig(AppConfig):
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class MarketingemailsConfig(AppConfig):
name = 'marketingemails'
<|reserved_special_token_1|>
from django.apps import AppConfig
class MarketingemailsConfig(AppConfig):
name = 'marketingemails'
|
flexible
|
{
"blob_id": "19bb58ab440ca00bf6410a70a8b6bbc24eec96c1",
"index": 492,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass MarketingemailsConfig(AppConfig):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass MarketingemailsConfig(AppConfig):\n name = 'marketingemails'\n",
"step-4": "from django.apps import AppConfig\n\n\nclass MarketingemailsConfig(AppConfig):\n name = 'marketingemails'\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
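An AppConfig like the one above only takes effect once the app is registered in the project settings; the sketch below is standard Django convention rather than anything stored in this record:

# settings.py (sketch) -- standard Django wiring, not part of this record
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'marketingemails.apps.MarketingemailsConfig',  # or simply 'marketingemails'
]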
<|reserved_special_token_0|>
def layer_forward(x, w):
"""
input:
- inputs (x): (N, d_1, ..., d_k),
- weights (w): (D, M)
"""
z = None
output = []
cache = x, w, z, output
return output, cache
<|reserved_special_token_0|>
def affine_backward(d_output, cache):
"""
input:
- upstream derivative (d_output): (N, M)
- cache (cache): (x, w)
return:
- gradients (dx, d_w, d_b): ((N, d1, ..., d_k)(D, M), (M,))
"""
x, w, b = cache
N = d_output.shape[0]
d_x = d_output.dot(w.T).reshape(x.shape)
d_w = x.reshape([N, -1]).T.dot(d_output)
d_b = np.sum(d_output, axis=0)
return d_x, d_w, d_b
def relu_forward(x):
"""
input:
- inputs (x): (N, d_1, ..., d_k)
return:
- output: (N, d_1, ..., d_k)
- cache: x
"""
output = np.fmax(x, 0)
cache = x
return output, cache
def relu_backward(d_output, cache):
"""
input:
- upstream derivative (d_output): (N, d_1, ..., d_k)
- cache for x (cache): (N, d_1, ..., d_k)
return:
- d_x: gradient with respect to x
"""
x = cache
d_x = np.sign(np.fmax(x, 0)) * d_output
return d_x
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def layer_forward(x, w):
"""
input:
- inputs (x): (N, d_1, ..., d_k),
- weights (w): (D, M)
"""
z = None
output = []
cache = x, w, z, output
return output, cache
<|reserved_special_token_0|>
def affine_forward(x, w, b):
"""
A simple linear feedforward (affine)
input:
- inputs (x): (N, d_1, ..., d_k),
- weights (w): (D, M)
- bias (b): (M,)
return:
- output: (N, M)
- cache: (x, w, b)
"""
N = x.shape[0]
output = x.reshape([N, -1]).dot(w) + b
cache = x, w, b
return output, cache
def affine_backward(d_output, cache):
"""
input:
- upstream derivative (d_output): (N, M)
- cache (cache): (x, w)
return:
- gradients (dx, d_w, d_b): ((N, d1, ..., d_k)(D, M), (M,))
"""
x, w, b = cache
N = d_output.shape[0]
d_x = d_output.dot(w.T).reshape(x.shape)
d_w = x.reshape([N, -1]).T.dot(d_output)
d_b = np.sum(d_output, axis=0)
return d_x, d_w, d_b
def relu_forward(x):
"""
input:
- inputs (x): (N, d_1, ..., d_k)
return:
- output: (N, d_1, ..., d_k)
- cache: x
"""
output = np.fmax(x, 0)
cache = x
return output, cache
def relu_backward(d_output, cache):
"""
input:
- upstream derivative (d_output): (N, d_1, ..., d_k)
- cache for x (cache): (N, d_1, ..., d_k)
return:
- d_x: gradient with respect to x
"""
x = cache
d_x = np.sign(np.fmax(x, 0)) * d_output
return d_x
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def layer_forward(x, w):
"""
input:
- inputs (x): (N, d_1, ..., d_k),
- weights (w): (D, M)
"""
z = None
output = []
cache = x, w, z, output
return output, cache
def layer_backward(d_output, cache):
""" Receive derivative of loss with respect
to outputs and cache, and compute derivative
with respect to inputs
"""
x, w, z, output = cache
d_x, d_w = None, None
return d_x, d_w
def affine_forward(x, w, b):
"""
A simple linear feedforward (affine)
input:
- inputs (x): (N, d_1, ..., d_k),
- weights (w): (D, M)
- bias (b): (M,)
return:
- output: (N, M)
- cache: (x, w, b)
"""
N = x.shape[0]
output = x.reshape([N, -1]).dot(w) + b
cache = x, w, b
return output, cache
def affine_backward(d_output, cache):
"""
input:
- upstream derivative (d_output): (N, M)
- cache (cache): (x, w)
return:
- gradients (dx, d_w, d_b): ((N, d1, ..., d_k)(D, M), (M,))
"""
x, w, b = cache
N = d_output.shape[0]
d_x = d_output.dot(w.T).reshape(x.shape)
d_w = x.reshape([N, -1]).T.dot(d_output)
d_b = np.sum(d_output, axis=0)
return d_x, d_w, d_b
def relu_forward(x):
"""
input:
- inputs (x): (N, d_1, ..., d_k)
return:
- output: (N, d_1, ..., d_k)
- cache: x
"""
output = np.fmax(x, 0)
cache = x
return output, cache
def relu_backward(d_output, cache):
"""
input:
- upstream derivative (d_output): (N, d_1, ..., d_k)
- cache for x (cache): (N, d_1, ..., d_k)
return:
- d_x: gradient with respect to x
"""
x = cache
d_x = np.sign(np.fmax(x, 0)) * d_output
return d_x
<|reserved_special_token_1|>
import numpy as np
def layer_forward(x, w):
"""
input:
- inputs (x): (N, d_1, ..., d_k),
- weights (w): (D, M)
"""
z = None
output = []
cache = x, w, z, output
return output, cache
def layer_backward(d_output, cache):
""" Receive derivative of loss with respect
to outputs and cache, and compute derivative
with respect to inputs
"""
x, w, z, output = cache
d_x, d_w = None, None
return d_x, d_w
def affine_forward(x, w, b):
"""
A simple linear feedforward (affine)
input:
- inputs (x): (N, d_1, ..., d_k),
- weights (w): (D, M)
- bias (b): (M,)
return:
- output: (N, M)
- cache: (x, w, b)
"""
N = x.shape[0]
output = x.reshape([N, -1]).dot(w) + b
cache = x, w, b
return output, cache
def affine_backward(d_output, cache):
"""
input:
- upstream derivative (d_output): (N, M)
- cache (cache): (x, w)
return:
- gradients (dx, d_w, d_b): ((N, d1, ..., d_k)(D, M), (M,))
"""
x, w, b = cache
N = d_output.shape[0]
d_x = d_output.dot(w.T).reshape(x.shape)
d_w = x.reshape([N, -1]).T.dot(d_output)
d_b = np.sum(d_output, axis=0)
return d_x, d_w, d_b
def relu_forward(x):
"""
input:
- inputs (x): (N, d_1, ..., d_k)
return:
- output: (N, d_1, ..., d_k)
- cache: x
"""
output = np.fmax(x, 0)
cache = x
return output, cache
def relu_backward(d_output, cache):
"""
input:
- upstream derivative (d_output): (N, d_1, ..., d_k)
- cache for x (cache): (N, d_1, ..., d_k)
return:
- d_x: gradient with respect to x
"""
x = cache
d_x = np.sign(np.fmax(x, 0)) * d_output
return d_x
<|reserved_special_token_1|>
import numpy as np
def layer_forward(x, w):
"""
input:
- inputs (x): (N, d_1, ..., d_k),
- weights (w): (D, M)
"""
# intermediate value (z)
z = None
output = []
cache = (x, w, z, output)
return output, cache
def layer_backward(d_output, cache):
""" Receive derivative of loss with respect
to outputs and cache, and compute derivative
with respect to inputs
"""
# Unpack cache values
x, w, z, output = cache
# Compute derivatives (gradients)
d_x, d_w = None, None
return d_x, d_w
def affine_forward(x, w, b):
"""
A simple linear feedforward (affine)
input:
- inputs (x): (N, d_1, ..., d_k),
- weights (w): (D, M)
- bias (b): (M,)
return:
- output: (N, M)
- cache: (x, w, b)
"""
N = x.shape[0]
# reshape input into rows
output = x.reshape([N, -1]).dot(w) + b
cache = (x, w, b)
return output, cache
def affine_backward(d_output, cache):
"""
input:
- upstream derivative (d_output): (N, M)
- cache (cache): (x, w)
return:
- gradients (dx, d_w, d_b): ((N, d1, ..., d_k)(D, M), (M,))
"""
# Unpack cache values
x, w, b = cache
N = d_output.shape[0]
d_x = d_output.dot(w.T).reshape(x.shape)
d_w = x.reshape([N, -1]).T.dot(d_output)
d_b = np.sum(d_output, axis=0)
return d_x, d_w, d_b
def relu_forward(x):
"""
input:
- inputs (x): (N, d_1, ..., d_k)
return:
- output: (N, d_1, ..., d_k)
- cache: x
"""
output = np.fmax(x, 0)
cache = x
return output, cache
def relu_backward(d_output, cache):
"""
input:
- upstream derivative (d_output): (N, d_1, ..., d_k)
- cache for x (cache): (N, d_1, ..., d_k)
return:
- d_x: gradient with respect to x
"""
x = cache
d_x = np.sign(np.fmax(x, 0)) * d_output
return d_x
|
flexible
|
{
"blob_id": "c1fd6e940b3b15ae01a102b3c0aba9bd327c77b2",
"index": 8403,
"step-1": "<mask token>\n\n\ndef layer_forward(x, w):\n \"\"\"\n input:\n - inputs (x): (N, d_1, ..., d_k),\n - weights (w): (D, M)\n \"\"\"\n z = None\n output = []\n cache = x, w, z, output\n return output, cache\n\n\n<mask token>\n\n\ndef affine_backward(d_output, cache):\n \"\"\"\n input:\n - upstream derivative (d_output): (N, M)\n - cache (cache): (x, w)\n return:\n - gradients (dx, d_w, d_b): ((N, d1, ..., d_k)(D, M), (M,))\n \"\"\"\n x, w, b = cache\n N = d_output.shape[0]\n d_x = d_output.dot(w.T).reshape(x.shape)\n d_w = x.reshape([N, -1]).T.dot(d_output)\n d_b = np.sum(d_output, axis=0)\n return d_x, d_w, d_b\n\n\ndef relu_forward(x):\n \"\"\"\n input:\n - inputs (x): (N, d_1, ..., d_k)\n return:\n - output: (N, d_1, ..., d_k)\n - cache: x\n \"\"\"\n output = np.fmax(x, 0)\n cache = x\n return output, cache\n\n\ndef relu_backward(d_output, cache):\n \"\"\"\n input:\n - upstream derivative (d_output): (N, d_1, ..., d_k)\n - cache for x (cache): (N, d_1, ..., d_k)\n return:\n - d_x: gradient with respect to x\n \"\"\"\n x = cache\n d_x = np.sign(np.fmax(x, 0)) * d_output\n return d_x\n",
"step-2": "<mask token>\n\n\ndef layer_forward(x, w):\n \"\"\"\n input:\n - inputs (x): (N, d_1, ..., d_k),\n - weights (w): (D, M)\n \"\"\"\n z = None\n output = []\n cache = x, w, z, output\n return output, cache\n\n\n<mask token>\n\n\ndef affine_forward(x, w, b):\n \"\"\"\n A simple linear feedforward (affine)\n input:\n - inputs (x): (N, d_1, ..., d_k),\n - weights (w): (D, M)\n - bias (b): (M,)\n return:\n - output: (N, M)\n - cache: (x, w, b)\n \"\"\"\n N = x.shape[0]\n output = x.reshape([N, -1]).dot(w) + b\n cache = x, w, b\n return output, cache\n\n\ndef affine_backward(d_output, cache):\n \"\"\"\n input:\n - upstream derivative (d_output): (N, M)\n - cache (cache): (x, w)\n return:\n - gradients (dx, d_w, d_b): ((N, d1, ..., d_k)(D, M), (M,))\n \"\"\"\n x, w, b = cache\n N = d_output.shape[0]\n d_x = d_output.dot(w.T).reshape(x.shape)\n d_w = x.reshape([N, -1]).T.dot(d_output)\n d_b = np.sum(d_output, axis=0)\n return d_x, d_w, d_b\n\n\ndef relu_forward(x):\n \"\"\"\n input:\n - inputs (x): (N, d_1, ..., d_k)\n return:\n - output: (N, d_1, ..., d_k)\n - cache: x\n \"\"\"\n output = np.fmax(x, 0)\n cache = x\n return output, cache\n\n\ndef relu_backward(d_output, cache):\n \"\"\"\n input:\n - upstream derivative (d_output): (N, d_1, ..., d_k)\n - cache for x (cache): (N, d_1, ..., d_k)\n return:\n - d_x: gradient with respect to x\n \"\"\"\n x = cache\n d_x = np.sign(np.fmax(x, 0)) * d_output\n return d_x\n",
"step-3": "<mask token>\n\n\ndef layer_forward(x, w):\n \"\"\"\n input:\n - inputs (x): (N, d_1, ..., d_k),\n - weights (w): (D, M)\n \"\"\"\n z = None\n output = []\n cache = x, w, z, output\n return output, cache\n\n\ndef layer_backward(d_output, cache):\n \"\"\" Receive derivative of loss with respect\n to outputs and cache, and compute derivative\n with respect to inputs\n \"\"\"\n x, w, z, output = cache\n d_x, d_w = None, None\n return d_x, d_w\n\n\ndef affine_forward(x, w, b):\n \"\"\"\n A simple linear feedforward (affine)\n input:\n - inputs (x): (N, d_1, ..., d_k),\n - weights (w): (D, M)\n - bias (b): (M,)\n return:\n - output: (N, M)\n - cache: (x, w, b)\n \"\"\"\n N = x.shape[0]\n output = x.reshape([N, -1]).dot(w) + b\n cache = x, w, b\n return output, cache\n\n\ndef affine_backward(d_output, cache):\n \"\"\"\n input:\n - upstream derivative (d_output): (N, M)\n - cache (cache): (x, w)\n return:\n - gradients (dx, d_w, d_b): ((N, d1, ..., d_k)(D, M), (M,))\n \"\"\"\n x, w, b = cache\n N = d_output.shape[0]\n d_x = d_output.dot(w.T).reshape(x.shape)\n d_w = x.reshape([N, -1]).T.dot(d_output)\n d_b = np.sum(d_output, axis=0)\n return d_x, d_w, d_b\n\n\ndef relu_forward(x):\n \"\"\"\n input:\n - inputs (x): (N, d_1, ..., d_k)\n return:\n - output: (N, d_1, ..., d_k)\n - cache: x\n \"\"\"\n output = np.fmax(x, 0)\n cache = x\n return output, cache\n\n\ndef relu_backward(d_output, cache):\n \"\"\"\n input:\n - upstream derivative (d_output): (N, d_1, ..., d_k)\n - cache for x (cache): (N, d_1, ..., d_k)\n return:\n - d_x: gradient with respect to x\n \"\"\"\n x = cache\n d_x = np.sign(np.fmax(x, 0)) * d_output\n return d_x\n",
"step-4": "import numpy as np\n\n\ndef layer_forward(x, w):\n \"\"\"\n input:\n - inputs (x): (N, d_1, ..., d_k),\n - weights (w): (D, M)\n \"\"\"\n z = None\n output = []\n cache = x, w, z, output\n return output, cache\n\n\ndef layer_backward(d_output, cache):\n \"\"\" Receive derivative of loss with respect\n to outputs and cache, and compute derivative\n with respect to inputs\n \"\"\"\n x, w, z, output = cache\n d_x, d_w = None, None\n return d_x, d_w\n\n\ndef affine_forward(x, w, b):\n \"\"\"\n A simple linear feedforward (affine)\n input:\n - inputs (x): (N, d_1, ..., d_k),\n - weights (w): (D, M)\n - bias (b): (M,)\n return:\n - output: (N, M)\n - cache: (x, w, b)\n \"\"\"\n N = x.shape[0]\n output = x.reshape([N, -1]).dot(w) + b\n cache = x, w, b\n return output, cache\n\n\ndef affine_backward(d_output, cache):\n \"\"\"\n input:\n - upstream derivative (d_output): (N, M)\n - cache (cache): (x, w)\n return:\n - gradients (dx, d_w, d_b): ((N, d1, ..., d_k)(D, M), (M,))\n \"\"\"\n x, w, b = cache\n N = d_output.shape[0]\n d_x = d_output.dot(w.T).reshape(x.shape)\n d_w = x.reshape([N, -1]).T.dot(d_output)\n d_b = np.sum(d_output, axis=0)\n return d_x, d_w, d_b\n\n\ndef relu_forward(x):\n \"\"\"\n input:\n - inputs (x): (N, d_1, ..., d_k)\n return:\n - output: (N, d_1, ..., d_k)\n - cache: x\n \"\"\"\n output = np.fmax(x, 0)\n cache = x\n return output, cache\n\n\ndef relu_backward(d_output, cache):\n \"\"\"\n input:\n - upstream derivative (d_output): (N, d_1, ..., d_k)\n - cache for x (cache): (N, d_1, ..., d_k)\n return:\n - d_x: gradient with respect to x\n \"\"\"\n x = cache\n d_x = np.sign(np.fmax(x, 0)) * d_output\n return d_x\n",
"step-5": "import numpy as np\n\n\ndef layer_forward(x, w):\n \"\"\"\n input:\n - inputs (x): (N, d_1, ..., d_k),\n - weights (w): (D, M)\n \"\"\"\n # intermediate value (z)\n z = None\n output = []\n cache = (x, w, z, output)\n\n return output, cache\n\n\ndef layer_backward(d_output, cache):\n \"\"\" Receive derivative of loss with respect\n to outputs and cache, and compute derivative\n with respect to inputs\n \"\"\"\n\n # Unpack cache values\n x, w, z, output = cache\n\n # Compute derivatives (gradients)\n d_x, d_w = None, None\n\n return d_x, d_w\n\n\ndef affine_forward(x, w, b):\n \"\"\"\n A simple linear feedforward (affine)\n input:\n - inputs (x): (N, d_1, ..., d_k),\n - weights (w): (D, M)\n - bias (b): (M,)\n return:\n - output: (N, M)\n - cache: (x, w, b)\n \"\"\"\n N = x.shape[0]\n\n # reshape input into rows\n output = x.reshape([N, -1]).dot(w) + b\n cache = (x, w, b)\n\n return output, cache\n\n\ndef affine_backward(d_output, cache):\n \"\"\"\n input:\n - upstream derivative (d_output): (N, M)\n - cache (cache): (x, w)\n return:\n - gradients (dx, d_w, d_b): ((N, d1, ..., d_k)(D, M), (M,))\n \"\"\"\n\n # Unpack cache values\n x, w, b = cache\n\n N = d_output.shape[0]\n d_x = d_output.dot(w.T).reshape(x.shape)\n d_w = x.reshape([N, -1]).T.dot(d_output)\n d_b = np.sum(d_output, axis=0)\n\n return d_x, d_w, d_b\n\n\ndef relu_forward(x):\n \"\"\"\n input:\n - inputs (x): (N, d_1, ..., d_k)\n return:\n - output: (N, d_1, ..., d_k)\n - cache: x\n \"\"\"\n output = np.fmax(x, 0)\n cache = x\n\n return output, cache\n\n\ndef relu_backward(d_output, cache):\n \"\"\"\n input:\n - upstream derivative (d_output): (N, d_1, ..., d_k)\n - cache for x (cache): (N, d_1, ..., d_k)\n return:\n - d_x: gradient with respect to x\n \"\"\"\n x = cache\n d_x = np.sign(np.fmax(x, 0)) * d_output\n\n return d_x\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
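A standard way to validate backward passes like affine_backward is a central-difference gradient check. The sketch below assumes the functions above are in scope (e.g., pasted into the same file) and uses sum(output * d_output) as the scalar loss, so that the upstream gradient with respect to the output is exactly d_output:

import numpy as np

def num_grad(f, x, h=1e-5):
    # central-difference numerical gradient of a scalar function f at x
    grad = np.zeros_like(x)
    it = np.nditer(x, flags=['multi_index'], op_flags=['readwrite'])
    while not it.finished:
        ix = it.multi_index
        old = x[ix]
        x[ix] = old + h; fp = f(x)
        x[ix] = old - h; fm = f(x)
        x[ix] = old
        grad[ix] = (fp - fm) / (2.0 * h)
        it.iternext()
    return grad

x = np.random.randn(4, 3)
w = np.random.randn(3, 5)
b = np.random.randn(5)
d_out = np.random.randn(4, 5)

out, cache = affine_forward(x, w, b)
d_x, d_w, d_b = affine_backward(d_out, cache)

# with loss = sum(out * d_out), the upstream gradient w.r.t. out is d_out
d_x_num = num_grad(lambda v: np.sum(affine_forward(v, w, b)[0] * d_out), x)
d_w_num = num_grad(lambda v: np.sum(affine_forward(x, v, b)[0] * d_out), w)
print(np.max(np.abs(d_x - d_x_num)), np.max(np.abs(d_w - d_w_num)))  # ~1e-8 or smaller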
import numpy as np
#1
def longest_substring(string1,string2):
mat=np.zeros(shape=(len(string1),len(string2)))
for x in range(len(string1)):
for y in range(len(string2)):
if x==0 or y==0:
if string1[x]==string2[y]:
mat[x,y]=1
else:
if string1[x]==string2[y]:
mat[x,y]=mat[x-1,y-1]+1
agmx=np.argmax(mat)
iofagmx=np.unravel_index(agmx,mat.shape)
numbofstr=int(np.max(mat))
endstring=string1[iofagmx[0]-numbofstr+1:iofagmx[0]+1]
return endstring
if __name__ == '__main__':
assert longest_substring("jsanad","anasc") == "ana"
assert longest_substring("ilovebioinformatics","icantwaitformax") == "forma"
assert longest_substring("ironmansaregreat","triathlonforever") == "on"
assert longest_substring("ihatewalking","nobikenolife") == "i"
assert longest_substring("gofaster","govegan") == "go"
|
normal
|
{
"blob_id": "6bb7dafea73aff7aca9b0ddc1393e4db6fcf0151",
"index": 4828,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef longest_substring(string1, string2):\n mat = np.zeros(shape=(len(string1), len(string2)))\n for x in range(len(string1)):\n for y in range(len(string2)):\n if x == 0 or y == 0:\n if string1[x] == string2[y]:\n mat[x, y] = 1\n elif string1[x] == string2[y]:\n mat[x, y] = mat[x - 1, y - 1] + 1\n agmx = np.argmax(mat)\n iofagmx = np.unravel_index(agmx, mat.shape)\n numbofstr = int(np.max(mat))\n endstring = string1[iofagmx[0] - numbofstr + 1:iofagmx[0] + 1]\n return endstring\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef longest_substring(string1, string2):\n mat = np.zeros(shape=(len(string1), len(string2)))\n for x in range(len(string1)):\n for y in range(len(string2)):\n if x == 0 or y == 0:\n if string1[x] == string2[y]:\n mat[x, y] = 1\n elif string1[x] == string2[y]:\n mat[x, y] = mat[x - 1, y - 1] + 1\n agmx = np.argmax(mat)\n iofagmx = np.unravel_index(agmx, mat.shape)\n numbofstr = int(np.max(mat))\n endstring = string1[iofagmx[0] - numbofstr + 1:iofagmx[0] + 1]\n return endstring\n\n\nif __name__ == '__main__':\n assert longest_substring('jsanad', 'anasc') == 'ana'\n assert longest_substring('ilovebioinformatics', 'icantwaitformax'\n ) == 'forma'\n assert longest_substring('ironmansaregreat', 'triathlonforever') == 'on'\n assert longest_substring('ihatewalking', 'nobikenolife') == 'i'\n assert longest_substring('gofaster', 'govegan') == 'go'\n",
"step-4": "import numpy as np\n\n\ndef longest_substring(string1, string2):\n mat = np.zeros(shape=(len(string1), len(string2)))\n for x in range(len(string1)):\n for y in range(len(string2)):\n if x == 0 or y == 0:\n if string1[x] == string2[y]:\n mat[x, y] = 1\n elif string1[x] == string2[y]:\n mat[x, y] = mat[x - 1, y - 1] + 1\n agmx = np.argmax(mat)\n iofagmx = np.unravel_index(agmx, mat.shape)\n numbofstr = int(np.max(mat))\n endstring = string1[iofagmx[0] - numbofstr + 1:iofagmx[0] + 1]\n return endstring\n\n\nif __name__ == '__main__':\n assert longest_substring('jsanad', 'anasc') == 'ana'\n assert longest_substring('ilovebioinformatics', 'icantwaitformax'\n ) == 'forma'\n assert longest_substring('ironmansaregreat', 'triathlonforever') == 'on'\n assert longest_substring('ihatewalking', 'nobikenolife') == 'i'\n assert longest_substring('gofaster', 'govegan') == 'go'\n",
"step-5": "import numpy as np\n#1\ndef longest_substring(string1,string2):\n mat=np.zeros(shape=(len(string1),len(string2)))\n for x in range(len(string1)):\n for y in range(len(string2)):\n if x==0 or y==0:\n if string1[x]==string2[y]:\n mat[x,y]=1\n else:\n if string1[x]==string2[y]:\n mat[x,y]=mat[x-1,y-1]+1\n agmx=np.argmax(mat)\n iofagmx=np.unravel_index(agmx,mat.shape)\n numbofstr=int(np.max(mat))\n endstring=string1[iofagmx[0]-numbofstr+1:iofagmx[0]+1]\n return endstring\n \nif __name__ == '__main__':\n assert longest_substring(\"jsanad\",\"anasc\") == \"ana\"\n assert longest_substring(\"ilovebioinformatics\",\"icantwaitformax\") == \"forma\"\n assert longest_substring(\"ironmansaregreat\",\"triathlonforever\") == \"on\"\n assert longest_substring(\"ihatewalking\",\"nobikenolife\") == \"i\"\n assert longest_substring(\"gofaster\",\"govegan\") == \"go\" \n \n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
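The matrix in longest_substring is the classic longest-common-substring dynamic program: mat[x, y] holds the length of the longest common suffix of string1[:x+1] and string2[:y+1], and the maximum entry marks where the longest common substring ends. A micro-trace, assuming the function above is in scope:

# Trace for string1 = "gofaster", string2 = "govegan":
#   'g' == 'g' -> mat[0, 0] = 1
#   'o' == 'o' -> mat[1, 1] = mat[0, 0] + 1 = 2   (the maximum entry)
# np.argmax finds the 2 at (1, 1); with length 2 the slice is
# string1[1 - 2 + 1 : 1 + 1] == string1[0:2] == "go".
print(longest_substring("gofaster", "govegan"))  # -> "go"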
<|reserved_special_token_0|>
class SwarmModel:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class NeighbourhoodModel:
_particles = []
_bestPosition = None
_bestPositionFitness = -1
def __init__(self, particles):
self._particles = particles
self._bestPosition = None
self._bestPositionFitness = -1
class KnapsackSolutionModel:
_items = []
_knapsackSize = None
def __init__(self, items, size):
self._items = items
self._knapsackSize = size
class TSPSolutionModel:
_edges = {}
_startNode = None
_numOfCities = None
_bestPath = []
def __init__(self, edges, numOfCities, startNode):
self._edges = edges
self._numOfCities = numOfCities
self._startNode = startNode
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class SwarmModel:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def __init__(self):
self._particles = []
self._neighbourhoods = None
self._bestPosition = None
self._bestPositionFitness = -1
class NeighbourhoodModel:
_particles = []
_bestPosition = None
_bestPositionFitness = -1
def __init__(self, particles):
self._particles = particles
self._bestPosition = None
self._bestPositionFitness = -1
class KnapsackSolutionModel:
_items = []
_knapsackSize = None
def __init__(self, items, size):
self._items = items
self._knapsackSize = size
class TSPSolutionModel:
_edges = {}
_startNode = None
_numOfCities = None
_bestPath = []
def __init__(self, edges, numOfCities, startNode):
self._edges = edges
self._numOfCities = numOfCities
self._startNode = startNode
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class SwarmModel:
_particles = None
_neighbourhoods = None
_bestPosition = None
_bestPositionFitness = -1
def __init__(self):
self._particles = []
self._neighbourhoods = None
self._bestPosition = None
self._bestPositionFitness = -1
class NeighbourhoodModel:
_particles = []
_bestPosition = None
_bestPositionFitness = -1
def __init__(self, particles):
self._particles = particles
self._bestPosition = None
self._bestPositionFitness = -1
class KnapsackSolutionModel:
_items = []
_knapsackSize = None
def __init__(self, items, size):
self._items = items
self._knapsackSize = size
class TSPSolutionModel:
_edges = {}
_startNode = None
_numOfCities = None
_bestPath = []
def __init__(self, edges, numOfCities, startNode):
self._edges = edges
self._numOfCities = numOfCities
self._startNode = startNode
<|reserved_special_token_1|>
class ParticleModel:
_position = None
_velocity = None
_bestPosition = None
_nbBestPosition = None
_fitness = -1
def __init__(self):
self._position = None
self._velocity = None
self._bestPosition = None
self._nbBestPosition = None
self._fitness = -1
class SwarmModel:
_particles = None
_neighbourhoods = None
_bestPosition = None
_bestPositionFitness = -1
def __init__(self):
self._particles = []
self._neighbourhoods = None
self._bestPosition = None
self._bestPositionFitness = -1
class NeighbourhoodModel:
_particles = []
_bestPosition = None
_bestPositionFitness = -1
def __init__(self, particles):
self._particles = particles
self._bestPosition = None
self._bestPositionFitness = -1
class KnapsackSolutionModel:
_items = []
_knapsackSize = None
def __init__(self, items, size):
self._items = items
self._knapsackSize = size
class TSPSolutionModel:
_edges = {}
_startNode = None
_numOfCities = None
_bestPath = []
def __init__(self, edges, numOfCities, startNode):
self._edges = edges
self._numOfCities = numOfCities
self._startNode = startNode
<|reserved_special_token_1|>
#===============================================================================
# @author: Daniel V. Stankevich
# @organization: RMIT, School of Computer Science, 2012
#
#
# This package contains representations of the following models:
# 'Particle' - an atomic element
# 'Swarm' - a set of particles
# 'Neighbourhood' - particles topology
# 'KnapsackSolution' - representation for solution of the problem
# 'TSPSolution' - representation for solution of the problem
#===============================================================================
#===============================================================================
# GENERIC MODELS
#===============================================================================
#---- Particle representation
class ParticleModel:
_position = None
_velocity = None
_bestPosition = None
_nbBestPosition = None
_fitness = -1
def __init__(self):
self._position = None
self._velocity = None
self._bestPosition = None
self._nbBestPosition = None
self._fitness = -1
#---- Swarm representation
class SwarmModel:
_particles = None
_neighbourhoods = None
_bestPosition = None
_bestPositionFitness = -1
def __init__(self):
self._particles = []
self._neighbourhoods = None
self._bestPosition = None
self._bestPositionFitness = -1
#---- Neighbourhood representation
class NeighbourhoodModel:
_particles = []
_bestPosition = None
_bestPositionFitness = -1
def __init__(self, particles):
self._particles = particles
self._bestPosition = None
self._bestPositionFitness = -1
#===============================================================================
# PROBLEM SPECIFIC MODELS
#===============================================================================
#---- Knapsack Problem Solution representation
class KnapsackSolutionModel:
_items = []
_knapsackSize = None
def __init__(self, items, size):
self._items = items
self._knapsackSize = size
#---- TSP Problem Solution representation
class TSPSolutionModel:
_edges = {}
_startNode = None
_numOfCities = None
_bestPath = []
def __init__(self, edges, numOfCities, startNode):
self._edges = edges
self._numOfCities = numOfCities
self._startNode = startNode
|
flexible
|
{
"blob_id": "5c06229f8e80a7225620f25941cc5276a9021e53",
"index": 5353,
"step-1": "<mask token>\n\n\nclass SwarmModel:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass NeighbourhoodModel:\n _particles = []\n _bestPosition = None\n _bestPositionFitness = -1\n\n def __init__(self, particles):\n self._particles = particles\n self._bestPosition = None\n self._bestPositionFitness = -1\n\n\nclass KnapsackSolutionModel:\n _items = []\n _knapsackSize = None\n\n def __init__(self, items, size):\n self._items = items\n self._knapsackSize = size\n\n\nclass TSPSolutionModel:\n _edges = {}\n _startNode = None\n _numOfCities = None\n _bestPath = []\n\n def __init__(self, edges, numOfCities, startNode):\n self._edges = edges\n self._numOfCities = numOfCities\n self._startNode = startNode\n",
"step-2": "<mask token>\n\n\nclass SwarmModel:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self):\n self._particles = []\n self._neighbourhoods = None\n self._bestPosition = None\n self._bestPositionFitness = -1\n\n\nclass NeighbourhoodModel:\n _particles = []\n _bestPosition = None\n _bestPositionFitness = -1\n\n def __init__(self, particles):\n self._particles = particles\n self._bestPosition = None\n self._bestPositionFitness = -1\n\n\nclass KnapsackSolutionModel:\n _items = []\n _knapsackSize = None\n\n def __init__(self, items, size):\n self._items = items\n self._knapsackSize = size\n\n\nclass TSPSolutionModel:\n _edges = {}\n _startNode = None\n _numOfCities = None\n _bestPath = []\n\n def __init__(self, edges, numOfCities, startNode):\n self._edges = edges\n self._numOfCities = numOfCities\n self._startNode = startNode\n",
"step-3": "<mask token>\n\n\nclass SwarmModel:\n _particles = None\n _neighbourhoods = None\n _bestPosition = None\n _bestPositionFitness = -1\n\n def __init__(self):\n self._particles = []\n self._neighbourhoods = None\n self._bestPosition = None\n self._bestPositionFitness = -1\n\n\nclass NeighbourhoodModel:\n _particles = []\n _bestPosition = None\n _bestPositionFitness = -1\n\n def __init__(self, particles):\n self._particles = particles\n self._bestPosition = None\n self._bestPositionFitness = -1\n\n\nclass KnapsackSolutionModel:\n _items = []\n _knapsackSize = None\n\n def __init__(self, items, size):\n self._items = items\n self._knapsackSize = size\n\n\nclass TSPSolutionModel:\n _edges = {}\n _startNode = None\n _numOfCities = None\n _bestPath = []\n\n def __init__(self, edges, numOfCities, startNode):\n self._edges = edges\n self._numOfCities = numOfCities\n self._startNode = startNode\n",
"step-4": "class ParticleModel:\n _position = None\n _velocity = None\n _bestPosition = None\n _nbBestPosition = None\n _fitness = -1\n\n def __init__(self):\n self._position = None\n self._velocity = None\n self._bestPosition = None\n self._nbBestPosition = None\n self._fitness = -1\n\n\nclass SwarmModel:\n _particles = None\n _neighbourhoods = None\n _bestPosition = None\n _bestPositionFitness = -1\n\n def __init__(self):\n self._particles = []\n self._neighbourhoods = None\n self._bestPosition = None\n self._bestPositionFitness = -1\n\n\nclass NeighbourhoodModel:\n _particles = []\n _bestPosition = None\n _bestPositionFitness = -1\n\n def __init__(self, particles):\n self._particles = particles\n self._bestPosition = None\n self._bestPositionFitness = -1\n\n\nclass KnapsackSolutionModel:\n _items = []\n _knapsackSize = None\n\n def __init__(self, items, size):\n self._items = items\n self._knapsackSize = size\n\n\nclass TSPSolutionModel:\n _edges = {}\n _startNode = None\n _numOfCities = None\n _bestPath = []\n\n def __init__(self, edges, numOfCities, startNode):\n self._edges = edges\n self._numOfCities = numOfCities\n self._startNode = startNode\n",
"step-5": "#===============================================================================\n# @author: Daniel V. Stankevich\n# @organization: RMIT, School of Computer Science, 2012\n#\n#\n# This package contains representations of the following models:\n# 'Particle' - an atomic element\n# 'Swarm' - a set of particles\n# 'Neighbourhood' - particles topology\n# 'KnapsackSolution' - representation for solution of the problem\n# 'TSPSolution' - representation for solution of the problem\n#===============================================================================\n\n\n\n#===============================================================================\n# GENERIC MODELS\n#===============================================================================\n\n#---- Particle representation\nclass ParticleModel:\n _position = None\n _velocity = None\n _bestPosition = None\n _nbBestPosition = None\n _fitness = -1\n\n def __init__(self):\n self._position = None\n self._velocity = None\n self._bestPosition = None\n self._nbBestPosition = None\n self._fitness = -1\n\n#---- Swarm representation\nclass SwarmModel:\n _particles = None\n _neighbourhoods = None\n _bestPosition = None\n _bestPositionFitness = -1\n \n def __init__(self):\n self._particles = []\n self._neighbourhoods = None\n self._bestPosition = None\n self._bestPositionFitness = -1\n \n\n#---- Neighbourhood representation \nclass NeighbourhoodModel:\n _particles = []\n _bestPosition = None\n _bestPositionFitness = -1\n \n def __init__(self, particles):\n self._particles = particles\n self._bestPosition = None\n self._bestPositionFitness = -1\n\n\n#===============================================================================\n# PROBLEM SPECIFIC MODELS\n#===============================================================================\n\n#---- Knapsack Problem Solution representation \nclass KnapsackSolutionModel:\n _items = [] \n _knapsackSize = None\n \n def __init__(self, items, size):\n self._items = items\n self._knapsackSize = size\n\n#---- TSP Problem Solution representation\nclass TSPSolutionModel:\n _edges = {}\n _startNode = None\n _numOfCities = None\n _bestPath = []\n \n def __init__(self, edges, numOfCities, startNode):\n self._edges = edges\n self._numOfCities = numOfCities\n self._startNode = startNode",
"step-ids": [
10,
11,
12,
15,
16
]
}
|
[
10,
11,
12,
15,
16
] |
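These classes are plain data holders, so a solver has to wire them together itself. A minimal sketch follows; the concrete field values, and in particular the (from, to)-keyed edge dictionary, are illustrative assumptions, since the record does not show how _edges is consumed:

# Illustrative wiring only; all concrete values are assumptions.
p1, p2 = ParticleModel(), ParticleModel()

swarm = SwarmModel()
swarm._particles = [p1, p2]
swarm._neighbourhoods = [NeighbourhoodModel([p1, p2])]  # one global neighbourhood

knapsack = KnapsackSolutionModel(items=[(60, 10), (100, 20)], size=30)

# A 4-city TSP instance; keying edges by (from, to) tuples is a guess.
edges = {(0, 1): 2.0, (1, 2): 3.0, (2, 3): 1.5, (3, 0): 4.0}
tsp = TSPSolutionModel(edges, numOfCities=4, startNode=0)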
"""
module : watcher.py
description : Script to automatically watch a directory (via watchdog) for tests and run them via py.test
"""
import sys
import os.path
import subprocess
import time
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
class SpecificationsEventHandler(FileSystemEventHandler):
"""Runs the tests inside the specifications class when any specification file is modified
"""
def __init__(self):
self.paused = False
self.banner = "============================================================"
	def on_modified(self, event):
		"""
			Description:
				Catches the file modified event from the watchdog package and
				creates the full path to the file for submission to the test engine
				of choice.

			Args:
				event: Contains the information for the file system event
				when modification has occurred
		"""
		super(SpecificationsEventHandler, self).on_modified(event)
# file modified triggers directory modified as well...
if event.is_directory:
return
if self.paused:
return
if event.src_path.endswith("_specs.py") and not self.paused:
self.paused = True
#filename = os.path.basename(event.src_path)
directory = os.path.abspath(os.path.dirname(event.src_path))
filename = os.path.basename(event.src_path)
file = os.path.join(directory, filename)
print(self.banner, end="\n")
print("testing specifications found in file: {0}".format(file))
print("")
# if using pytest, uncomment the line below
#subprocess.call(['py.test', '-v', file], shell=True)
#using mamba as the test engine:
subprocess.call(['mamba', file], shell=True)
print(self.banner, end="\n")
self.paused = False
return
if __name__ == "__main__":
path = sys.argv[1]
event_handler = SpecificationsEventHandler()
observer = Observer()
observer.schedule(event_handler, path, recursive=True)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
|
normal
|
{
"blob_id": "95ea8a21d3ac44c7760179bc4ebf67f0c16e6a19",
"index": 2421,
"step-1": "<mask token>\n\n\nclass SpecificationsEventHandler(FileSystemEventHandler):\n <mask token>\n\n def __init__(self):\n self.paused = False\n self.banner = (\n '============================================================')\n\n def on_modified(self, event):\n super(SpecificationsEventHandler, self).on_modified(event)\n \"\"\"\n\t\t\tDescription:\n\t\t\t\tCatches the file modified event from the watchdog package and \n\t\t\t\tcreates the full path to the file for submission to the test engine \n\t\t\t\tof choice.\n\t\t\t\t\n\t\t\tArgs:\n\t\t\t\tevent: Contains the information for the file system event \n\t\t\t\twhen modification has occurred\n\t\t\"\"\"\n if event.is_directory:\n return\n if self.paused:\n return\n if event.src_path.endswith('_specs.py') and not self.paused:\n self.paused = True\n directory = os.path.abspath(os.path.dirname(event.src_path))\n filename = os.path.basename(event.src_path)\n file = os.path.join(directory, filename)\n print(self.banner, end='\\n')\n print('testing specifications found in file: {0}'.format(file))\n print('')\n subprocess.call(['mamba', file], shell=True)\n print(self.banner, end='\\n')\n self.paused = False\n return\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass SpecificationsEventHandler(FileSystemEventHandler):\n \"\"\"Runs the tests inside the specifications class when any specification file is modified\n\t\"\"\"\n\n def __init__(self):\n self.paused = False\n self.banner = (\n '============================================================')\n\n def on_modified(self, event):\n super(SpecificationsEventHandler, self).on_modified(event)\n \"\"\"\n\t\t\tDescription:\n\t\t\t\tCatches the file modified event from the watchdog package and \n\t\t\t\tcreates the full path to the file for submission to the test engine \n\t\t\t\tof choice.\n\t\t\t\t\n\t\t\tArgs:\n\t\t\t\tevent: Contains the information for the file system event \n\t\t\t\twhen modification has occurred\n\t\t\"\"\"\n if event.is_directory:\n return\n if self.paused:\n return\n if event.src_path.endswith('_specs.py') and not self.paused:\n self.paused = True\n directory = os.path.abspath(os.path.dirname(event.src_path))\n filename = os.path.basename(event.src_path)\n file = os.path.join(directory, filename)\n print(self.banner, end='\\n')\n print('testing specifications found in file: {0}'.format(file))\n print('')\n subprocess.call(['mamba', file], shell=True)\n print(self.banner, end='\\n')\n self.paused = False\n return\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass SpecificationsEventHandler(FileSystemEventHandler):\n \"\"\"Runs the tests inside the specifications class when any specification file is modified\n\t\"\"\"\n\n def __init__(self):\n self.paused = False\n self.banner = (\n '============================================================')\n\n def on_modified(self, event):\n super(SpecificationsEventHandler, self).on_modified(event)\n \"\"\"\n\t\t\tDescription:\n\t\t\t\tCatches the file modified event from the watchdog package and \n\t\t\t\tcreates the full path to the file for submission to the test engine \n\t\t\t\tof choice.\n\t\t\t\t\n\t\t\tArgs:\n\t\t\t\tevent: Contains the information for the file system event \n\t\t\t\twhen modification has occurred\n\t\t\"\"\"\n if event.is_directory:\n return\n if self.paused:\n return\n if event.src_path.endswith('_specs.py') and not self.paused:\n self.paused = True\n directory = os.path.abspath(os.path.dirname(event.src_path))\n filename = os.path.basename(event.src_path)\n file = os.path.join(directory, filename)\n print(self.banner, end='\\n')\n print('testing specifications found in file: {0}'.format(file))\n print('')\n subprocess.call(['mamba', file], shell=True)\n print(self.banner, end='\\n')\n self.paused = False\n return\n\n\nif __name__ == '__main__':\n path = sys.argv[1]\n event_handler = SpecificationsEventHandler()\n observer = Observer()\n observer.schedule(event_handler, path, recursive=True)\n observer.start()\n try:\n while True:\n time.sleep(1)\n except KeyboardInterrupt:\n observer.stop()\n observer.join()\n",
"step-4": "<mask token>\nimport sys\nimport os.path\nimport subprocess\nimport time\nfrom watchdog.observers import Observer\nfrom watchdog.events import FileSystemEventHandler\n\n\nclass SpecificationsEventHandler(FileSystemEventHandler):\n \"\"\"Runs the tests inside the specifications class when any specification file is modified\n\t\"\"\"\n\n def __init__(self):\n self.paused = False\n self.banner = (\n '============================================================')\n\n def on_modified(self, event):\n super(SpecificationsEventHandler, self).on_modified(event)\n \"\"\"\n\t\t\tDescription:\n\t\t\t\tCatches the file modified event from the watchdog package and \n\t\t\t\tcreates the full path to the file for submission to the test engine \n\t\t\t\tof choice.\n\t\t\t\t\n\t\t\tArgs:\n\t\t\t\tevent: Contains the information for the file system event \n\t\t\t\twhen modification has occurred\n\t\t\"\"\"\n if event.is_directory:\n return\n if self.paused:\n return\n if event.src_path.endswith('_specs.py') and not self.paused:\n self.paused = True\n directory = os.path.abspath(os.path.dirname(event.src_path))\n filename = os.path.basename(event.src_path)\n file = os.path.join(directory, filename)\n print(self.banner, end='\\n')\n print('testing specifications found in file: {0}'.format(file))\n print('')\n subprocess.call(['mamba', file], shell=True)\n print(self.banner, end='\\n')\n self.paused = False\n return\n\n\nif __name__ == '__main__':\n path = sys.argv[1]\n event_handler = SpecificationsEventHandler()\n observer = Observer()\n observer.schedule(event_handler, path, recursive=True)\n observer.start()\n try:\n while True:\n time.sleep(1)\n except KeyboardInterrupt:\n observer.stop()\n observer.join()\n",
"step-5": "\"\"\"\nmodule\t\t\t: watcher.py\ndescription\t: Script to automatically watch a directory (via watchdog) for tests and run them via py.test\n\"\"\"\nimport sys\nimport os.path\nimport subprocess\nimport time\nfrom watchdog.observers import Observer\nfrom watchdog.events import FileSystemEventHandler\n\nclass SpecificationsEventHandler(FileSystemEventHandler):\n\t\"\"\"Runs the tests inside the specifications class when any specification file is modified\n\t\"\"\"\n\t\n\tdef __init__(self): \n\t\tself.paused = False\n\t\tself.banner = \"============================================================\"\n \n\tdef on_modified(self, event):\n\t\tsuper(SpecificationsEventHandler, self).on_modified(event)\n\t\t\"\"\"\n\t\t\tDescription:\n\t\t\t\tCatches the file modified event from the watchdog package and \n\t\t\t\tcreates the full path to the file for submission to the test engine \n\t\t\t\tof choice.\n\t\t\t\t\n\t\t\tArgs:\n\t\t\t\tevent: Contains the information for the file system event \n\t\t\t\twhen modification has occurred\n\t\t\"\"\"\n\t\t\n\t\t\n\t\t# file modified triggers directory modified as well...\t\t\n\t\tif event.is_directory:\n\t\t\treturn\n\n\t\tif self.paused: \n\t\t\treturn\n\n\t\tif event.src_path.endswith(\"_specs.py\") and not self.paused:\n\t\t\tself.paused = True\n\t\t\t#filename = os.path.basename(event.src_path)\n\t\t\tdirectory = os.path.abspath(os.path.dirname(event.src_path))\n\t\t\tfilename = os.path.basename(event.src_path)\n\t\t\tfile = os.path.join(directory, filename)\n\n\t\t\tprint(self.banner, end=\"\\n\")\n\t\t\tprint(\"testing specifications found in file: {0}\".format(file))\n\t\t\tprint(\"\")\n\t\t\t\n\t\t\t# if using pytest, uncomment the line below\n\t\t\t#subprocess.call(['py.test', '-v', file], shell=True)\t\n\t\t\t\n\t\t\t#using mamba as the test engine:\n\t\t\tsubprocess.call(['mamba', file], shell=True)\t\n\n\t\t\tprint(self.banner, end=\"\\n\")\n\n\t\t\tself.paused = False\n\t\t\treturn\n\n\nif __name__ == \"__main__\":\n path = sys.argv[1]\n event_handler = SpecificationsEventHandler()\n observer = Observer()\n observer.schedule(event_handler, path, recursive=True)\n observer.start()\n try:\n while True:\n time.sleep(1)\n except KeyboardInterrupt:\n observer.stop()\n observer.join() \n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
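To see the watcher fire, the observed directory needs a file whose name ends in _specs.py. A minimal mamba spec it would pick up might look like the sketch below (mamba's description/it DSL with the expects matcher library; both are external packages, the file name is illustrative, and whether mamba's own collector accepts the *_specs.py suffix may vary -- the suffix here is simply what the watcher requires):

# example_specs.py -- saved anywhere under the watched path
from mamba import description, it
from expects import expect, equal

with description('addition'):
    with it('adds two integers'):
        expect(1 + 1).to(equal(2))

The watcher itself is started with `python watcher.py <directory>`, per the __main__ block; saving the spec file then triggers on_modified, which shells out to `mamba example_specs.py`. The commented py.test line in the handler is the drop-in alternative.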
<|reserved_special_token_0|>
def trans_screen():
pyautogui.doubleClick(492, 974)
pyautogui.typewrite(['enter'], 0.01)
def trans_chinese():
for c_rubbish in chinese_rubbish:
pin = p.get_pinyin(c_rubbish, '')
pin_list = list(pin)
pin_list.append('1')
rubbish_set.append(pin_list)
def send_rubbish():
for p_rubbish in rubbish_set:
pyautogui.typewrite(p_rubbish, 0.01)
pyautogui.typewrite(['enter'], 0.01)
def chk_rubbish():
for p_dirty in rubbish_set:
print(p_dirty)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if rubbish_dic == 0:
chinese_rubbish = u'草泥马', u'你妈死了', u'你是不是', u'低能', u'人话都听不懂', u'没家教的狗东西'
elif rubbish_dic == 1:
rubbish_file = open('rubbish_dic.txt')
chinese_rubbish = rubbish_file.read().splitlines()
<|reserved_special_token_0|>
def trans_screen():
pyautogui.doubleClick(492, 974)
pyautogui.typewrite(['enter'], 0.01)
def trans_chinese():
for c_rubbish in chinese_rubbish:
pin = p.get_pinyin(c_rubbish, '')
pin_list = list(pin)
pin_list.append('1')
rubbish_set.append(pin_list)
def send_rubbish():
for p_rubbish in rubbish_set:
pyautogui.typewrite(p_rubbish, 0.01)
pyautogui.typewrite(['enter'], 0.01)
def chk_rubbish():
for p_dirty in rubbish_set:
print(p_dirty)
if __name__ == '__main__':
trans_chinese()
trans_screen()
send_rubbish()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
rubbish_dic = 1
if rubbish_dic == 0:
chinese_rubbish = u'草泥马', u'你妈死了', u'你是不是', u'低能', u'人话都听不懂', u'没家教的狗东西'
elif rubbish_dic == 1:
rubbish_file = open('rubbish_dic.txt')
chinese_rubbish = rubbish_file.read().splitlines()
rubbish_set = []
p = Pinyin()
def trans_screen():
pyautogui.doubleClick(492, 974)
pyautogui.typewrite(['enter'], 0.01)
def trans_chinese():
for c_rubbish in chinese_rubbish:
pin = p.get_pinyin(c_rubbish, '')
pin_list = list(pin)
pin_list.append('1')
rubbish_set.append(pin_list)
def send_rubbish():
for p_rubbish in rubbish_set:
pyautogui.typewrite(p_rubbish, 0.01)
pyautogui.typewrite(['enter'], 0.01)
def chk_rubbish():
for p_dirty in rubbish_set:
print(p_dirty)
if __name__ == '__main__':
trans_chinese()
trans_screen()
send_rubbish()
<|reserved_special_token_1|>
import pyautogui
from xpinyin import Pinyin
rubbish_dic = 1
if rubbish_dic == 0:
chinese_rubbish = u'草泥马', u'你妈死了', u'你是不是', u'低能', u'人话都听不懂', u'没家教的狗东西'
elif rubbish_dic == 1:
rubbish_file = open('rubbish_dic.txt')
chinese_rubbish = rubbish_file.read().splitlines()
rubbish_set = []
p = Pinyin()
def trans_screen():
pyautogui.doubleClick(492, 974)
pyautogui.typewrite(['enter'], 0.01)
def trans_chinese():
for c_rubbish in chinese_rubbish:
pin = p.get_pinyin(c_rubbish, '')
pin_list = list(pin)
pin_list.append('1')
rubbish_set.append(pin_list)
def send_rubbish():
for p_rubbish in rubbish_set:
pyautogui.typewrite(p_rubbish, 0.01)
pyautogui.typewrite(['enter'], 0.01)
def chk_rubbish():
for p_dirty in rubbish_set:
print(p_dirty)
if __name__ == '__main__':
trans_chinese()
trans_screen()
send_rubbish()
<|reserved_special_token_1|>
# coding=utf-8
import pyautogui
from xpinyin import Pinyin
rubbish_dic=1
if rubbish_dic==0:
chinese_rubbish=(
u"草泥马",
u"你妈死了",
u"你是不是",
u"低能",
u"人话都听不懂",
u"没家教的狗东西",
)
elif rubbish_dic==1:
rubbish_file=open("rubbish_dic.txt")
chinese_rubbish=rubbish_file.read().splitlines()
rubbish_set=[]  # final list of pinyin keystroke sequences
p=Pinyin()  # converts Chinese characters to pinyin
#switch to the target window by clicking
def trans_screen():
pyautogui.doubleClick(492,974)
pyautogui.typewrite(['enter'],0.01)
#convert the Chinese strings to pinyin
def trans_chinese():
for c_rubbish in chinese_rubbish:
pin=p.get_pinyin(c_rubbish,'')
pin_list=list(pin)
pin_list.append("1")
rubbish_set.append(pin_list)
#send the text
def send_rubbish():
for p_rubbish in rubbish_set:
pyautogui.typewrite(p_rubbish,0.01)
pyautogui.typewrite(['enter'],0.01)
#inspect the current contents of rubbish_set
def chk_rubbish():
for p_dirty in rubbish_set:
print(p_dirty)
if __name__ == "__main__":
trans_chinese()
#chk_rubbish()
trans_screen()
send_rubbish()
|
flexible
|
{
"blob_id": "23e673909b2f1eb9a265ce84ad63464e20e99c6a",
"index": 3449,
"step-1": "<mask token>\n\n\ndef trans_screen():\n pyautogui.doubleClick(492, 974)\n pyautogui.typewrite(['enter'], 0.01)\n\n\ndef trans_chinese():\n for c_rubbish in chinese_rubbish:\n pin = p.get_pinyin(c_rubbish, '')\n pin_list = list(pin)\n pin_list.append('1')\n rubbish_set.append(pin_list)\n\n\ndef send_rubbish():\n for p_rubbish in rubbish_set:\n pyautogui.typewrite(p_rubbish, 0.01)\n pyautogui.typewrite(['enter'], 0.01)\n\n\ndef chk_rubbish():\n for p_dirty in rubbish_set:\n print(p_dirty)\n\n\n<mask token>\n",
"step-2": "<mask token>\nif rubbish_dic == 0:\n chinese_rubbish = u'草泥马', u'你妈死了', u'你是不是', u'低能', u'人话都听不懂', u'没家教的狗东西'\nelif rubbish_dic == 1:\n rubbish_file = open('rubbish_dic.txt')\n chinese_rubbish = rubbish_file.read().splitlines()\n<mask token>\n\n\ndef trans_screen():\n pyautogui.doubleClick(492, 974)\n pyautogui.typewrite(['enter'], 0.01)\n\n\ndef trans_chinese():\n for c_rubbish in chinese_rubbish:\n pin = p.get_pinyin(c_rubbish, '')\n pin_list = list(pin)\n pin_list.append('1')\n rubbish_set.append(pin_list)\n\n\ndef send_rubbish():\n for p_rubbish in rubbish_set:\n pyautogui.typewrite(p_rubbish, 0.01)\n pyautogui.typewrite(['enter'], 0.01)\n\n\ndef chk_rubbish():\n for p_dirty in rubbish_set:\n print(p_dirty)\n\n\nif __name__ == '__main__':\n trans_chinese()\n trans_screen()\n send_rubbish()\n",
"step-3": "<mask token>\nrubbish_dic = 1\nif rubbish_dic == 0:\n chinese_rubbish = u'草泥马', u'你妈死了', u'你是不是', u'低能', u'人话都听不懂', u'没家教的狗东西'\nelif rubbish_dic == 1:\n rubbish_file = open('rubbish_dic.txt')\n chinese_rubbish = rubbish_file.read().splitlines()\nrubbish_set = []\np = Pinyin()\n\n\ndef trans_screen():\n pyautogui.doubleClick(492, 974)\n pyautogui.typewrite(['enter'], 0.01)\n\n\ndef trans_chinese():\n for c_rubbish in chinese_rubbish:\n pin = p.get_pinyin(c_rubbish, '')\n pin_list = list(pin)\n pin_list.append('1')\n rubbish_set.append(pin_list)\n\n\ndef send_rubbish():\n for p_rubbish in rubbish_set:\n pyautogui.typewrite(p_rubbish, 0.01)\n pyautogui.typewrite(['enter'], 0.01)\n\n\ndef chk_rubbish():\n for p_dirty in rubbish_set:\n print(p_dirty)\n\n\nif __name__ == '__main__':\n trans_chinese()\n trans_screen()\n send_rubbish()\n",
"step-4": "import pyautogui\nfrom xpinyin import Pinyin\nrubbish_dic = 1\nif rubbish_dic == 0:\n chinese_rubbish = u'草泥马', u'你妈死了', u'你是不是', u'低能', u'人话都听不懂', u'没家教的狗东西'\nelif rubbish_dic == 1:\n rubbish_file = open('rubbish_dic.txt')\n chinese_rubbish = rubbish_file.read().splitlines()\nrubbish_set = []\np = Pinyin()\n\n\ndef trans_screen():\n pyautogui.doubleClick(492, 974)\n pyautogui.typewrite(['enter'], 0.01)\n\n\ndef trans_chinese():\n for c_rubbish in chinese_rubbish:\n pin = p.get_pinyin(c_rubbish, '')\n pin_list = list(pin)\n pin_list.append('1')\n rubbish_set.append(pin_list)\n\n\ndef send_rubbish():\n for p_rubbish in rubbish_set:\n pyautogui.typewrite(p_rubbish, 0.01)\n pyautogui.typewrite(['enter'], 0.01)\n\n\ndef chk_rubbish():\n for p_dirty in rubbish_set:\n print(p_dirty)\n\n\nif __name__ == '__main__':\n trans_chinese()\n trans_screen()\n send_rubbish()\n",
"step-5": "# coding=utf-8\nimport pyautogui\nfrom xpinyin import Pinyin\n\nrubbish_dic=1\n\nif rubbish_dic==0:\n chinese_rubbish=(\n u\"草泥马\",\n u\"你妈死了\",\n u\"你是不是\",\n u\"低能\",\n u\"人话都听不懂\",\n u\"没家教的狗东西\", \n )\nelif rubbish_dic==1:\n rubbish_file=open(\"rubbish_dic.txt\")\n chinese_rubbish=rubbish_file.read().splitlines()\n\n\nrubbish_set=[] #最终的拼音方式\np=Pinyin() #用于转换拼音\n\n#通过点击的方式切屏 \ndef trans_screen():\n pyautogui.doubleClick(492,974)\n pyautogui.typewrite(['enter'],0.01)\n\n#将中文转化成拼音\ndef trans_chinese():\n for c_rubbish in chinese_rubbish:\n pin=p.get_pinyin(c_rubbish,'')\n pin_list=list(pin)\n pin_list.append(\"1\")\n rubbish_set.append(pin_list)\n\n#发送text\ndef send_rubbish(): \n for p_rubbish in rubbish_set:\n pyautogui.typewrite(p_rubbish,0.01)\n pyautogui.typewrite(['enter'],0.01)\n\n#查看当前的rubbish_set内容\ndef chk_rubbish():\n for p_dirty in rubbish_set:\n print(p_dirty)\n\nif __name__ == \"__main__\":\n trans_chinese()\n #chk_rubbish()\n trans_screen()\n send_rubbish()\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
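The record above turns each Chinese string into a pinyin keystroke list and replays it with pyautogui. A minimal sketch of just the conversion step, assuming xpinyin is installed; the sample phrase and the trailing '1' (the usual first-candidate key for Chinese IMEs) follow the record's convention:

# sketch: Chinese string -> list of pinyin keystrokes, as built in trans_chinese() above
from xpinyin import Pinyin

p = Pinyin()
phrase = u"你好"                          # sample input, not taken from the record's dictionary
keys = list(p.get_pinyin(phrase, ''))    # 'nihao' -> ['n', 'i', 'h', 'a', 'o']
keys.append('1')                         # select the first IME candidate, matching the record
print(keys)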
def selectionSort(arr, low, high):
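    # note: low and high are accepted here but never used; the whole list is sorted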
for i in range(len(arr)):
mini = i
for j in range(i + 1, len(arr)):
if arr[mini] > arr[j]:
mini = j
arr[i], arr[mini] = arr[mini], arr[i]
return arr
|
normal
|
{
"blob_id": "c91be6cc332139c5b1e7ee5a3512482d0f8620b1",
"index": 7322,
"step-1": "<mask token>\n",
"step-2": "def selectionSort(arr, low, high):\n for i in range(len(arr)):\n mini = i\n for j in range(i + 1, len(arr)):\n if arr[mini] > arr[j]:\n mini = j\n arr[i], arr[mini] = arr[mini], arr[i]\n return arr\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
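selectionSort above takes low and high yet sorts the whole list (see the note added in its body). A sketch of the variant those parameters suggest, restricting the passes to the half-open range [low, high); this interpretation is an assumption, not part of the record:

def selection_sort_range(arr, low, high):
    # sort arr[low:high] in place; elements outside the range are untouched
    for i in range(low, high):
        mini = i
        for j in range(i + 1, high):
            if arr[j] < arr[mini]:
                mini = j
        arr[i], arr[mini] = arr[mini], arr[i]
    return arr

print(selection_sort_range([5, 3, 1, 4, 2], 1, 4))  # -> [5, 1, 3, 4, 2]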
#!/usr/bin/env python3
#
# main.py - By Steven Chen Hao Nyeo
# Graphical interface for Socionics Engine
# Created: August 8, 2019
import wx
from cognitive_function import *
from entity import Entity
from function_to_type import Translator
from function_analysis import *
class TypeFrame(wx.Frame):
def __init__(self, parent, title):
# Create Frame
wx.Frame.__init__(self, parent, title = title, size = (530, 480), style = wx.DEFAULT_FRAME_STYLE ^ wx.RESIZE_BORDER)
self.panel = wx.Panel(self)
# The current list of cognitive functions entered into the system
self.entityList = []
# Arrays containing the rows of buttons for dominant and auxiliary functions
self.domButtons = []
self.auxButtons = []
# Keep track of the current row of buttons to enable
self.rowCount = 0
# Setup for program interface
self.row_1_y = 30
self.row_2_y = 90
self.row_3_y = 150
wx.StaticText(self.panel, label = "Dominant Function:", pos = (30, self.row_1_y - 20))
self.createCogButtons(0)
wx.StaticText(self.panel, label = "Auxiliary Function:", pos = (30, self.row_2_y - 20))
self.createCogButtons(1)
# The function that creates the buttons for the eight cognitive functions
def createCogButtons(self, row):
# Keeps track of creation of dominant or auxiliary buttons
cogButtons = self.domButtons if row == 0 else self.auxButtons
# Create and bind the buttons to the event
labels = ["N", "S", "T", "F"]
for i in range(4):
cogButtons.append(wx.Button(self.panel, label = labels[i] + "i", size = (50, 30) , pos = (30 + 120 * i, self.row_1_y if row == 0 else self.row_2_y)))
cogButtons.append(wx.Button(self.panel, label = labels[i] + "e", size = (50, 30) , pos = (90 + 120 * i, self.row_1_y if row == 0 else self.row_2_y)))
for i in range(8):
self.Bind(wx.EVT_BUTTON, self.onclick_cogFunction, cogButtons[i])
# The auxiliary buttons are disabled before the dominant function is entered
if (row == 1):
for button in self.auxButtons:
button.Disable()
# The event handler for clicking on the buttons
def onclick_cogFunction(self, event):
btnLabel = event.GetEventObject().GetLabel()
# First row - dominant function
if (self.rowCount == 0):
# Disable the dominant function buttons
self.rowCount = 1
self.entityList.append(self.labelToFunction(btnLabel))
for button in self.domButtons:
button.Disable()
# Re-enable the appropriate auxiliary function buttons
for button in self.auxButtons:
if (button.Label[1] == self.entityList[0].opposite().sublabel
and button.Label[0] != self.entityList[0].opposite_orientation().label
and button.Label[0] != self.entityList[0].label):
button.Enable()
# Second row - auxiliary function
else:
self.entityList.append(self.labelToFunction(btnLabel))
for button in self.auxButtons:
button.Disable()
if (len(self.entityList) == 2):
e = Entity(self.entityList)
print(Translator.translate_orientation(e) +
Translator.translate_observing(e) +
Translator.translate_decision_making(e) +
Translator.translate_perception(e))
    # The helper function that returns the corresponding function object for the entered label
def labelToFunction(self, btnLabel):
if (btnLabel == "Ni"):
return Ni
elif (btnLabel == "Ne"):
return Ne
elif (btnLabel == "Si"):
return Si
elif (btnLabel == "Se"):
return Se
elif (btnLabel == "Ti"):
return Ti
elif (btnLabel == "Te"):
return Te
elif (btnLabel == "Fi"):
return Fi
elif (btnLabel == "Fe"):
return Fe
if __name__ == "__main__":
app = wx.App()
frame = TypeFrame(None, title = "Socionics Engine")
frame.Show()
app.MainLoop()
|
normal
|
{
"blob_id": "519dbe97ce9de30e616d660ef168e686c52b01b5",
"index": 5452,
"step-1": "<mask token>\n\n\nclass TypeFrame(wx.Frame):\n <mask token>\n\n def createCogButtons(self, row):\n cogButtons = self.domButtons if row == 0 else self.auxButtons\n labels = ['N', 'S', 'T', 'F']\n for i in range(4):\n cogButtons.append(wx.Button(self.panel, label=labels[i] + 'i',\n size=(50, 30), pos=(30 + 120 * i, self.row_1_y if row == 0 else\n self.row_2_y)))\n cogButtons.append(wx.Button(self.panel, label=labels[i] + 'e',\n size=(50, 30), pos=(90 + 120 * i, self.row_1_y if row == 0 else\n self.row_2_y)))\n for i in range(8):\n self.Bind(wx.EVT_BUTTON, self.onclick_cogFunction, cogButtons[i])\n if row == 1:\n for button in self.auxButtons:\n button.Disable()\n\n def onclick_cogFunction(self, event):\n btnLabel = event.GetEventObject().GetLabel()\n if self.rowCount == 0:\n self.rowCount = 1\n self.entityList.append(self.labelToFunction(btnLabel))\n for button in self.domButtons:\n button.Disable()\n for button in self.auxButtons:\n if button.Label[1] == self.entityList[0].opposite(\n ).sublabel and button.Label[0] != self.entityList[0\n ].opposite_orientation().label and button.Label[0\n ] != self.entityList[0].label:\n button.Enable()\n else:\n self.entityList.append(self.labelToFunction(btnLabel))\n for button in self.auxButtons:\n button.Disable()\n if len(self.entityList) == 2:\n e = Entity(self.entityList)\n print(Translator.translate_orientation(e) + Translator.\n translate_observing(e) + Translator.\n translate_decision_making(e) + Translator.\n translate_perception(e))\n\n def labelToFunction(self, btnLabel):\n if btnLabel == 'Ni':\n return Ni\n elif btnLabel == 'Ne':\n return Ne\n elif btnLabel == 'Si':\n return Si\n elif btnLabel == 'Se':\n return Se\n elif btnLabel == 'Ti':\n return Ti\n elif btnLabel == 'Te':\n return Te\n elif btnLabel == 'Fi':\n return Fi\n elif btnLabel == 'Fe':\n return Fe\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass TypeFrame(wx.Frame):\n\n def __init__(self, parent, title):\n wx.Frame.__init__(self, parent, title=title, size=(530, 480), style\n =wx.DEFAULT_FRAME_STYLE ^ wx.RESIZE_BORDER)\n self.panel = wx.Panel(self)\n self.entityList = []\n self.domButtons = []\n self.auxButtons = []\n self.rowCount = 0\n self.row_1_y = 30\n self.row_2_y = 90\n self.row_3_y = 150\n wx.StaticText(self.panel, label='Dominant Function:', pos=(30, self\n .row_1_y - 20))\n self.createCogButtons(0)\n wx.StaticText(self.panel, label='Auxiliary Function:', pos=(30, \n self.row_2_y - 20))\n self.createCogButtons(1)\n\n def createCogButtons(self, row):\n cogButtons = self.domButtons if row == 0 else self.auxButtons\n labels = ['N', 'S', 'T', 'F']\n for i in range(4):\n cogButtons.append(wx.Button(self.panel, label=labels[i] + 'i',\n size=(50, 30), pos=(30 + 120 * i, self.row_1_y if row == 0 else\n self.row_2_y)))\n cogButtons.append(wx.Button(self.panel, label=labels[i] + 'e',\n size=(50, 30), pos=(90 + 120 * i, self.row_1_y if row == 0 else\n self.row_2_y)))\n for i in range(8):\n self.Bind(wx.EVT_BUTTON, self.onclick_cogFunction, cogButtons[i])\n if row == 1:\n for button in self.auxButtons:\n button.Disable()\n\n def onclick_cogFunction(self, event):\n btnLabel = event.GetEventObject().GetLabel()\n if self.rowCount == 0:\n self.rowCount = 1\n self.entityList.append(self.labelToFunction(btnLabel))\n for button in self.domButtons:\n button.Disable()\n for button in self.auxButtons:\n if button.Label[1] == self.entityList[0].opposite(\n ).sublabel and button.Label[0] != self.entityList[0\n ].opposite_orientation().label and button.Label[0\n ] != self.entityList[0].label:\n button.Enable()\n else:\n self.entityList.append(self.labelToFunction(btnLabel))\n for button in self.auxButtons:\n button.Disable()\n if len(self.entityList) == 2:\n e = Entity(self.entityList)\n print(Translator.translate_orientation(e) + Translator.\n translate_observing(e) + Translator.\n translate_decision_making(e) + Translator.\n translate_perception(e))\n\n def labelToFunction(self, btnLabel):\n if btnLabel == 'Ni':\n return Ni\n elif btnLabel == 'Ne':\n return Ne\n elif btnLabel == 'Si':\n return Si\n elif btnLabel == 'Se':\n return Se\n elif btnLabel == 'Ti':\n return Ti\n elif btnLabel == 'Te':\n return Te\n elif btnLabel == 'Fi':\n return Fi\n elif btnLabel == 'Fe':\n return Fe\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass TypeFrame(wx.Frame):\n\n def __init__(self, parent, title):\n wx.Frame.__init__(self, parent, title=title, size=(530, 480), style\n =wx.DEFAULT_FRAME_STYLE ^ wx.RESIZE_BORDER)\n self.panel = wx.Panel(self)\n self.entityList = []\n self.domButtons = []\n self.auxButtons = []\n self.rowCount = 0\n self.row_1_y = 30\n self.row_2_y = 90\n self.row_3_y = 150\n wx.StaticText(self.panel, label='Dominant Function:', pos=(30, self\n .row_1_y - 20))\n self.createCogButtons(0)\n wx.StaticText(self.panel, label='Auxiliary Function:', pos=(30, \n self.row_2_y - 20))\n self.createCogButtons(1)\n\n def createCogButtons(self, row):\n cogButtons = self.domButtons if row == 0 else self.auxButtons\n labels = ['N', 'S', 'T', 'F']\n for i in range(4):\n cogButtons.append(wx.Button(self.panel, label=labels[i] + 'i',\n size=(50, 30), pos=(30 + 120 * i, self.row_1_y if row == 0 else\n self.row_2_y)))\n cogButtons.append(wx.Button(self.panel, label=labels[i] + 'e',\n size=(50, 30), pos=(90 + 120 * i, self.row_1_y if row == 0 else\n self.row_2_y)))\n for i in range(8):\n self.Bind(wx.EVT_BUTTON, self.onclick_cogFunction, cogButtons[i])\n if row == 1:\n for button in self.auxButtons:\n button.Disable()\n\n def onclick_cogFunction(self, event):\n btnLabel = event.GetEventObject().GetLabel()\n if self.rowCount == 0:\n self.rowCount = 1\n self.entityList.append(self.labelToFunction(btnLabel))\n for button in self.domButtons:\n button.Disable()\n for button in self.auxButtons:\n if button.Label[1] == self.entityList[0].opposite(\n ).sublabel and button.Label[0] != self.entityList[0\n ].opposite_orientation().label and button.Label[0\n ] != self.entityList[0].label:\n button.Enable()\n else:\n self.entityList.append(self.labelToFunction(btnLabel))\n for button in self.auxButtons:\n button.Disable()\n if len(self.entityList) == 2:\n e = Entity(self.entityList)\n print(Translator.translate_orientation(e) + Translator.\n translate_observing(e) + Translator.\n translate_decision_making(e) + Translator.\n translate_perception(e))\n\n def labelToFunction(self, btnLabel):\n if btnLabel == 'Ni':\n return Ni\n elif btnLabel == 'Ne':\n return Ne\n elif btnLabel == 'Si':\n return Si\n elif btnLabel == 'Se':\n return Se\n elif btnLabel == 'Ti':\n return Ti\n elif btnLabel == 'Te':\n return Te\n elif btnLabel == 'Fi':\n return Fi\n elif btnLabel == 'Fe':\n return Fe\n\n\nif __name__ == '__main__':\n app = wx.App()\n frame = TypeFrame(None, title='Socionics Engine')\n frame.Show()\n app.MainLoop()\n",
"step-4": "import wx\nfrom cognitive_function import *\nfrom entity import Entity\nfrom function_to_type import Translator\nfrom function_analysis import *\n\n\nclass TypeFrame(wx.Frame):\n\n def __init__(self, parent, title):\n wx.Frame.__init__(self, parent, title=title, size=(530, 480), style\n =wx.DEFAULT_FRAME_STYLE ^ wx.RESIZE_BORDER)\n self.panel = wx.Panel(self)\n self.entityList = []\n self.domButtons = []\n self.auxButtons = []\n self.rowCount = 0\n self.row_1_y = 30\n self.row_2_y = 90\n self.row_3_y = 150\n wx.StaticText(self.panel, label='Dominant Function:', pos=(30, self\n .row_1_y - 20))\n self.createCogButtons(0)\n wx.StaticText(self.panel, label='Auxiliary Function:', pos=(30, \n self.row_2_y - 20))\n self.createCogButtons(1)\n\n def createCogButtons(self, row):\n cogButtons = self.domButtons if row == 0 else self.auxButtons\n labels = ['N', 'S', 'T', 'F']\n for i in range(4):\n cogButtons.append(wx.Button(self.panel, label=labels[i] + 'i',\n size=(50, 30), pos=(30 + 120 * i, self.row_1_y if row == 0 else\n self.row_2_y)))\n cogButtons.append(wx.Button(self.panel, label=labels[i] + 'e',\n size=(50, 30), pos=(90 + 120 * i, self.row_1_y if row == 0 else\n self.row_2_y)))\n for i in range(8):\n self.Bind(wx.EVT_BUTTON, self.onclick_cogFunction, cogButtons[i])\n if row == 1:\n for button in self.auxButtons:\n button.Disable()\n\n def onclick_cogFunction(self, event):\n btnLabel = event.GetEventObject().GetLabel()\n if self.rowCount == 0:\n self.rowCount = 1\n self.entityList.append(self.labelToFunction(btnLabel))\n for button in self.domButtons:\n button.Disable()\n for button in self.auxButtons:\n if button.Label[1] == self.entityList[0].opposite(\n ).sublabel and button.Label[0] != self.entityList[0\n ].opposite_orientation().label and button.Label[0\n ] != self.entityList[0].label:\n button.Enable()\n else:\n self.entityList.append(self.labelToFunction(btnLabel))\n for button in self.auxButtons:\n button.Disable()\n if len(self.entityList) == 2:\n e = Entity(self.entityList)\n print(Translator.translate_orientation(e) + Translator.\n translate_observing(e) + Translator.\n translate_decision_making(e) + Translator.\n translate_perception(e))\n\n def labelToFunction(self, btnLabel):\n if btnLabel == 'Ni':\n return Ni\n elif btnLabel == 'Ne':\n return Ne\n elif btnLabel == 'Si':\n return Si\n elif btnLabel == 'Se':\n return Se\n elif btnLabel == 'Ti':\n return Ti\n elif btnLabel == 'Te':\n return Te\n elif btnLabel == 'Fi':\n return Fi\n elif btnLabel == 'Fe':\n return Fe\n\n\nif __name__ == '__main__':\n app = wx.App()\n frame = TypeFrame(None, title='Socionics Engine')\n frame.Show()\n app.MainLoop()\n",
"step-5": "#!/usr/bin/env python3\n#\n# main.py - By Steven Chen Hao Nyeo \n# Graphical interface for Socionics Engine \n# Created: August 8, 2019\n\nimport wx\nfrom cognitive_function import *\nfrom entity import Entity\nfrom function_to_type import Translator\nfrom function_analysis import *\n\nclass TypeFrame(wx.Frame):\n def __init__(self, parent, title):\n \n # Create Frame\n wx.Frame.__init__(self, parent, title = title, size = (530, 480), style = wx.DEFAULT_FRAME_STYLE ^ wx.RESIZE_BORDER)\n self.panel = wx.Panel(self)\n \n # The current list of cognitive functions entered into the system \n self.entityList = []\n\n # Arrays containing the rows of buttons for dominant and auxiliary functions\n self.domButtons = []\n self.auxButtons = []\n\n # Keep track of the current row of buttons to enable\n self.rowCount = 0\n\n # Setup for program interface\n self.row_1_y = 30\n self.row_2_y = 90\n self.row_3_y = 150\n wx.StaticText(self.panel, label = \"Dominant Function:\", pos = (30, self.row_1_y - 20))\n self.createCogButtons(0)\n wx.StaticText(self.panel, label = \"Auxiliary Function:\", pos = (30, self.row_2_y - 20))\n self.createCogButtons(1)\n\n # The function that creates the buttons for the eight cognitive functions\n def createCogButtons(self, row):\n\n # Keeps track of creation of dominant or auxiliary buttons\n cogButtons = self.domButtons if row == 0 else self.auxButtons \n \n # Create and bind the buttons to the event\n labels = [\"N\", \"S\", \"T\", \"F\"]\n for i in range(4): \n cogButtons.append(wx.Button(self.panel, label = labels[i] + \"i\", size = (50, 30) , pos = (30 + 120 * i, self.row_1_y if row == 0 else self.row_2_y)))\n cogButtons.append(wx.Button(self.panel, label = labels[i] + \"e\", size = (50, 30) , pos = (90 + 120 * i, self.row_1_y if row == 0 else self.row_2_y)))\n for i in range(8):\n self.Bind(wx.EVT_BUTTON, self.onclick_cogFunction, cogButtons[i])\n\n # The auxiliary buttons are disabled before the dominant function is entered\n if (row == 1): \n for button in self.auxButtons:\n button.Disable()\n\n # The event handler for clicking on the buttons\n def onclick_cogFunction(self, event):\n btnLabel = event.GetEventObject().GetLabel()\n\n # First row - dominant function\n if (self.rowCount == 0):\n\n # Disable the dominant function buttons\n self.rowCount = 1\n self.entityList.append(self.labelToFunction(btnLabel))\n for button in self.domButtons:\n button.Disable()\n\n # Re-enable the appropriate auxiliary function buttons\n for button in self.auxButtons:\n if (button.Label[1] == self.entityList[0].opposite().sublabel \n and button.Label[0] != self.entityList[0].opposite_orientation().label\n and button.Label[0] != self.entityList[0].label):\n button.Enable()\n\n # Second row - auxiliary function\n else:\n self.entityList.append(self.labelToFunction(btnLabel))\n for button in self.auxButtons:\n button.Disable()\n\n if (len(self.entityList) == 2):\n e = Entity(self.entityList)\n\n print(Translator.translate_orientation(e) +\n Translator.translate_observing(e) +\n Translator.translate_decision_making(e) +\n Translator.translate_perception(e))\n\n # The helper functin that returns the corresponding function object according to the entered string\n def labelToFunction(self, btnLabel):\n if (btnLabel == \"Ni\"): \n return Ni\n elif (btnLabel == \"Ne\"): \n return Ne\n elif (btnLabel == \"Si\"): \n return Si\n elif (btnLabel == \"Se\"): \n return Se\n elif (btnLabel == \"Ti\"): \n return Ti\n elif (btnLabel == \"Te\"): \n return Te\n elif (btnLabel == \"Fi\"): \n 
return Fi\n elif (btnLabel == \"Fe\"): \n return Fe\n\nif __name__ == \"__main__\":\n app = wx.App()\n frame = TypeFrame(None, title = \"Socionics Engine\")\n frame.Show()\n app.MainLoop()\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
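The frame above binds eight buttons to a single handler and dispatches on the clicked button's label. A stripped-down sketch of that one-handler pattern with hypothetical labels, not the cognitive-function set from the record:

import wx

class DemoFrame(wx.Frame):
    def __init__(self):
        wx.Frame.__init__(self, None, title='Label dispatch demo')
        panel = wx.Panel(self)
        for i, label in enumerate(['A', 'B', 'C']):
            btn = wx.Button(panel, label=label, pos=(10 + 60 * i, 10))
            self.Bind(wx.EVT_BUTTON, self.on_click, btn)

    def on_click(self, event):
        # one handler for every button; branch on the label, as labelToFunction does above
        print('clicked', event.GetEventObject().GetLabel())

if __name__ == '__main__':
    app = wx.App()
    DemoFrame().Show()
    app.MainLoop()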
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
parser.add_argument('-p', type=str, default='default', help=
    'name of a policy file')
parser.add_argument('-n', type=int, default=100000, help='number of patients')
<|reserved_special_token_0|>
if len(matchingPolicies) == 0:
raise SystemExit(f'No matching policy named {policyName}')
elif len(matchingPolicies) > 1:
raise SystemExit(
f'Multiple matching policies for {policyName}: {matchingPolicies}')
<|reserved_special_token_0|>
with open(policyFile, 'r') as stream:
policySettings = json.load(stream, object_hook=lambda d: namedtuple('X',
d.keys())(*d.values()))
<|reserved_special_token_0|>
with open(OutputFile, 'w', newline='') as csvfile:
writer = csv.writer(csvfile)
keys = ['Number on Private Insurance:', 'Number on Medicare:',
'Number on Medicaid:', 'Number of Uninsured:', 'Private Premium:',
'Medicare Premium:', 'Medicare Funds:', 'Medicaid Funds:']
for key in keys:
row = [key] + results['runSummary'][key]
writer.writerow(row)
patients = results['patients']
writer.writerow(['Patient ID', 'Age', 'Ethnicity', 'Gender',
'Education', 'Income', 'Income Bracket', 'QALY', 'Diabetes',
'Diagnosed', 'Controlled', 'Deceased'])
for m in range(len(patients)):
writer.writerow([m, patients[m].age, patients[m].ethnicity,
patients[m].gender, patients[m].education, patients[m].income,
patients[m].IPR, patients[m].QALY, patients[m].diabetes,
patients[m].diagnosed, patients[m].controlled, patients[m].
deceased])
<|reserved_special_token_1|>
<|reserved_special_token_0|>
OutputFile = './HealthSimOutputSheet.csv'
parser = argparse.ArgumentParser(description='Select policy file')
parser.add_argument('-p', type=str, default='default', help=
    'name of a policy file')
parser.add_argument('-n', type=int, default=100000, help='number of patients')
args = parser.parse_args()
NumPatients = args.n
policyName = args.p
matchingPolicies = glob.glob(f'./policies/{policyName}*')
if len(matchingPolicies) == 0:
raise SystemExit(f'No matching policy named {policyName}')
elif len(matchingPolicies) > 1:
raise SystemExit(
f'Multiple matching policies for {policyName}: {matchingPolicies}')
policyFile = matchingPolicies[0]
with open(policyFile, 'r') as stream:
policySettings = json.load(stream, object_hook=lambda d: namedtuple('X',
d.keys())(*d.values()))
results = runModel(policySettings, NumPatients)
with open(OutputFile, 'w', newline='') as csvfile:
writer = csv.writer(csvfile)
keys = ['Number on Private Insurance:', 'Number on Medicare:',
'Number on Medicaid:', 'Number of Uninsured:', 'Private Premium:',
'Medicare Premium:', 'Medicare Funds:', 'Medicaid Funds:']
for key in keys:
row = [key] + results['runSummary'][key]
writer.writerow(row)
patients = results['patients']
writer.writerow(['Patient ID', 'Age', 'Ethnicity', 'Gender',
'Education', 'Income', 'Income Bracket', 'QALY', 'Diabetes',
'Diagnosed', 'Controlled', 'Deceased'])
for m in range(len(patients)):
writer.writerow([m, patients[m].age, patients[m].ethnicity,
patients[m].gender, patients[m].education, patients[m].income,
patients[m].IPR, patients[m].QALY, patients[m].diabetes,
patients[m].diagnosed, patients[m].controlled, patients[m].
deceased])
<|reserved_special_token_1|>
import json
import glob
import argparse
from model.NewModel import runModel
from collections import namedtuple
import csv
OutputFile = './HealthSimOutputSheet.csv'
parser = argparse.ArgumentParser(description='Select policy file')
parser.add_argument('-p', type=str, default='default', help=
    'name of a policy file')
parser.add_argument('-n', type=int, default=100000, help='number of patients')
args = parser.parse_args()
NumPatients = args.n
policyName = args.p
matchingPolicies = glob.glob(f'./policies/{policyName}*')
if len(matchingPolicies) == 0:
raise SystemExit(f'No matching policy named {policyName}')
elif len(matchingPolicies) > 1:
raise SystemExit(
f'Multiple matching policies for {policyName}: {matchingPolicies}')
policyFile = matchingPolicies[0]
with open(policyFile, 'r') as stream:
policySettings = json.load(stream, object_hook=lambda d: namedtuple('X',
d.keys())(*d.values()))
results = runModel(policySettings, NumPatients)
with open(OutputFile, 'w', newline='') as csvfile:
writer = csv.writer(csvfile)
keys = ['Number on Private Insurance:', 'Number on Medicare:',
'Number on Medicaid:', 'Number of Uninsured:', 'Private Premium:',
'Medicare Premium:', 'Medicare Funds:', 'Medicaid Funds:']
for key in keys:
row = [key] + results['runSummary'][key]
writer.writerow(row)
patients = results['patients']
writer.writerow(['Patient ID', 'Age', 'Ethnicity', 'Gender',
'Education', 'Income', 'Income Bracket', 'QALY', 'Diabetes',
'Diagnosed', 'Controlled', 'Deceased'])
for m in range(len(patients)):
writer.writerow([m, patients[m].age, patients[m].ethnicity,
patients[m].gender, patients[m].education, patients[m].income,
patients[m].IPR, patients[m].QALY, patients[m].diabetes,
patients[m].diagnosed, patients[m].controlled, patients[m].
deceased])
<|reserved_special_token_1|>
import json
import glob
import argparse
from model.NewModel import runModel
from collections import namedtuple
import csv
OutputFile = "./HealthSimOutputSheet.csv"
parser = argparse.ArgumentParser(description='Select policy file')
parser.add_argument('-p', type=str, default='default', help='name of a policy file')
parser.add_argument('-n', type=int, default=100000, help='number of patients')
args = parser.parse_args()
NumPatients = args.n
policyName = args.p
matchingPolicies = glob.glob(f"./policies/{policyName}*")
if len(matchingPolicies) == 0:
raise SystemExit(f"No matching policy named {policyName}")
elif len(matchingPolicies) > 1:
raise SystemExit(f"Multiple matching policies for {policyName}: {matchingPolicies}")
policyFile = matchingPolicies[0]
with open(policyFile, 'r') as stream:
# magic to turn json into an object instead of a dict
# https://stackoverflow.com/a/15882054
policySettings = json.load(stream, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
results = runModel(policySettings, NumPatients)
with open(OutputFile, 'w', newline='') as csvfile:
writer = csv.writer(csvfile)
keys = ["Number on Private Insurance:", "Number on Medicare:",
"Number on Medicaid:", "Number of Uninsured:",
"Private Premium:", "Medicare Premium:",
"Medicare Funds:", "Medicaid Funds:"]
for key in keys:
row = [key] + results['runSummary'][key]
writer.writerow(row)
patients = results['patients']
writer.writerow(["Patient ID", "Age", "Ethnicity", "Gender", "Education", "Income", "Income Bracket", "QALY", "Diabetes", "Diagnosed", "Controlled", "Deceased"])
for m in range(len(patients)):
writer.writerow([m, patients[m].age, patients[m].ethnicity, patients[m].gender, patients[m].education, patients[m].income, patients[m].IPR, patients[m].QALY, patients[m].diabetes, patients[m].diagnosed, patients[m].controlled, patients[m].deceased])
|
flexible
|
{
"blob_id": "894ce07c6443208483be2d3ef1409f12f24d99f3",
"index": 2852,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nparser.add_argument('-p', type=str, default='default', help=\n 'name of a a policy file')\nparser.add_argument('-n', type=int, default=100000, help='number of patients')\n<mask token>\nif len(matchingPolicies) == 0:\n raise SystemExit(f'No matching policy named {policyName}')\nelif len(matchingPolicies) > 1:\n raise SystemExit(\n f'Multiple matching policies for {policyName}: {matchingPolicies}')\n<mask token>\nwith open(policyFile, 'r') as stream:\n policySettings = json.load(stream, object_hook=lambda d: namedtuple('X',\n d.keys())(*d.values()))\n<mask token>\nwith open(OutputFile, 'w', newline='') as csvfile:\n writer = csv.writer(csvfile)\n keys = ['Number on Private Insurance:', 'Number on Medicare:',\n 'Number on Medicaid:', 'Number of Uninsured:', 'Private Premium:',\n 'Medicare Premium:', 'Medicare Funds:', 'Medicaid Funds:']\n for key in keys:\n row = [key] + results['runSummary'][key]\n writer.writerow(row)\n patients = results['patients']\n writer.writerow(['Patient ID', 'Age', 'Ethnicity', 'Gender',\n 'Education', 'Income', 'Income Bracket', 'QALY', 'Diabetes',\n 'Diagnosed', 'Controlled', 'Deceased'])\n for m in range(len(patients)):\n writer.writerow([m, patients[m].age, patients[m].ethnicity,\n patients[m].gender, patients[m].education, patients[m].income,\n patients[m].IPR, patients[m].QALY, patients[m].diabetes,\n patients[m].diagnosed, patients[m].controlled, patients[m].\n deceased])\n",
"step-3": "<mask token>\nOutputFile = './HealthSimOutputSheet.csv'\nparser = argparse.ArgumentParser(description='Select policy file')\nparser.add_argument('-p', type=str, default='default', help=\n 'name of a a policy file')\nparser.add_argument('-n', type=int, default=100000, help='number of patients')\nargs = parser.parse_args()\nNumPatients = args.n\npolicyName = args.p\nmatchingPolicies = glob.glob(f'./policies/{policyName}*')\nif len(matchingPolicies) == 0:\n raise SystemExit(f'No matching policy named {policyName}')\nelif len(matchingPolicies) > 1:\n raise SystemExit(\n f'Multiple matching policies for {policyName}: {matchingPolicies}')\npolicyFile = matchingPolicies[0]\nwith open(policyFile, 'r') as stream:\n policySettings = json.load(stream, object_hook=lambda d: namedtuple('X',\n d.keys())(*d.values()))\nresults = runModel(policySettings, NumPatients)\nwith open(OutputFile, 'w', newline='') as csvfile:\n writer = csv.writer(csvfile)\n keys = ['Number on Private Insurance:', 'Number on Medicare:',\n 'Number on Medicaid:', 'Number of Uninsured:', 'Private Premium:',\n 'Medicare Premium:', 'Medicare Funds:', 'Medicaid Funds:']\n for key in keys:\n row = [key] + results['runSummary'][key]\n writer.writerow(row)\n patients = results['patients']\n writer.writerow(['Patient ID', 'Age', 'Ethnicity', 'Gender',\n 'Education', 'Income', 'Income Bracket', 'QALY', 'Diabetes',\n 'Diagnosed', 'Controlled', 'Deceased'])\n for m in range(len(patients)):\n writer.writerow([m, patients[m].age, patients[m].ethnicity,\n patients[m].gender, patients[m].education, patients[m].income,\n patients[m].IPR, patients[m].QALY, patients[m].diabetes,\n patients[m].diagnosed, patients[m].controlled, patients[m].\n deceased])\n",
"step-4": "import json\nimport glob\nimport argparse\nfrom model.NewModel import runModel\nfrom collections import namedtuple\nimport csv\nOutputFile = './HealthSimOutputSheet.csv'\nparser = argparse.ArgumentParser(description='Select policy file')\nparser.add_argument('-p', type=str, default='default', help=\n 'name of a a policy file')\nparser.add_argument('-n', type=int, default=100000, help='number of patients')\nargs = parser.parse_args()\nNumPatients = args.n\npolicyName = args.p\nmatchingPolicies = glob.glob(f'./policies/{policyName}*')\nif len(matchingPolicies) == 0:\n raise SystemExit(f'No matching policy named {policyName}')\nelif len(matchingPolicies) > 1:\n raise SystemExit(\n f'Multiple matching policies for {policyName}: {matchingPolicies}')\npolicyFile = matchingPolicies[0]\nwith open(policyFile, 'r') as stream:\n policySettings = json.load(stream, object_hook=lambda d: namedtuple('X',\n d.keys())(*d.values()))\nresults = runModel(policySettings, NumPatients)\nwith open(OutputFile, 'w', newline='') as csvfile:\n writer = csv.writer(csvfile)\n keys = ['Number on Private Insurance:', 'Number on Medicare:',\n 'Number on Medicaid:', 'Number of Uninsured:', 'Private Premium:',\n 'Medicare Premium:', 'Medicare Funds:', 'Medicaid Funds:']\n for key in keys:\n row = [key] + results['runSummary'][key]\n writer.writerow(row)\n patients = results['patients']\n writer.writerow(['Patient ID', 'Age', 'Ethnicity', 'Gender',\n 'Education', 'Income', 'Income Bracket', 'QALY', 'Diabetes',\n 'Diagnosed', 'Controlled', 'Deceased'])\n for m in range(len(patients)):\n writer.writerow([m, patients[m].age, patients[m].ethnicity,\n patients[m].gender, patients[m].education, patients[m].income,\n patients[m].IPR, patients[m].QALY, patients[m].diabetes,\n patients[m].diagnosed, patients[m].controlled, patients[m].\n deceased])\n",
"step-5": "import json\nimport glob\nimport argparse\nfrom model.NewModel import runModel\nfrom collections import namedtuple\nimport csv\n\nOutputFile = \"./HealthSimOutputSheet.csv\"\n\nparser = argparse.ArgumentParser(description='Select policy file')\nparser.add_argument('-p', type=str, default='default', help='name of a a policy file')\nparser.add_argument('-n', type=int, default=100000, help='number of patients')\n\nargs = parser.parse_args()\n\nNumPatients = args.n\n\npolicyName = args.p\nmatchingPolicies = glob.glob(f\"./policies/{policyName}*\")\n\nif len(matchingPolicies) == 0:\n raise SystemExit(f\"No matching policy named {policyName}\")\nelif len(matchingPolicies) > 1:\n raise SystemExit(f\"Multiple matching policies for {policyName}: {matchingPolicies}\")\n\npolicyFile = matchingPolicies[0]\n\nwith open(policyFile, 'r') as stream:\n # magic to turn json into an object instead of a dict\n # https://stackoverflow.com/a/15882054\n policySettings = json.load(stream, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))\n\n\nresults = runModel(policySettings, NumPatients)\n\nwith open(OutputFile, 'w', newline='') as csvfile:\n writer = csv.writer(csvfile)\n keys = [\"Number on Private Insurance:\", \"Number on Medicare:\",\n \"Number on Medicaid:\", \"Number of Uninsured:\",\n \"Private Premium:\", \"Medicare Premium:\",\n \"Medicare Funds:\", \"Medicaid Funds:\"]\n\n for key in keys:\n row = [key] + results['runSummary'][key]\n writer.writerow(row)\n\n patients = results['patients']\n writer.writerow([\"Patient ID\", \"Age\", \"Ethnicity\", \"Gender\", \"Education\", \"Income\", \"Income Bracket\", \"QALY\", \"Diabetes\", \"Diagnosed\", \"Controlled\", \"Deceased\"])\n for m in range(len(patients)):\n writer.writerow([m, patients[m].age, patients[m].ethnicity, patients[m].gender, patients[m].education, patients[m].income, patients[m].IPR, patients[m].QALY, patients[m].diabetes, patients[m].diagnosed, patients[m].controlled, patients[m].deceased])\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
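The script above reads its policy JSON through an object_hook so settings are addressed as attributes instead of dict keys (the Stack Overflow trick its comment cites). A self-contained sketch of the idiom with made-up fields:

import json
from collections import namedtuple

raw = '{"premium": 120.5, "copay": 20}'   # hypothetical policy fragment
settings = json.loads(raw, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
print(settings.premium, settings.copay)   # 120.5 20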
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def calcula_elongacao(A, φ, ω, t):
    x = A * m.cos(ω * t + φ)
return x
<|reserved_special_token_1|>
import math as m
def calcula_elongacao(A, φ, ω, t):
    x = A * m.cos(ω * t + φ)
return x
<|reserved_special_token_1|>
import math as m
def calcula_elongacao(A, ϕ, ω, t):
    x = A * m.cos(ω * t + ϕ)
return x
|
flexible
|
{
"blob_id": "225687729b64f455bcc841e83105c7444efdfad3",
"index": 5545,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef calcula_elongacao(A, φ, ω, t):\n x = A * m.cos(φ + φ * t)\n return x\n",
"step-3": "import math as m\n\n\ndef calcula_elongacao(A, φ, ω, t):\n x = A * m.cos(φ + φ * t)\n return x\n",
"step-4": "import math as m\n\ndef calcula_elongacao(A, ϕ, ω, t):\n x = A * m.cos(ϕ + ϕ * t )\n return x",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
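calcula_elongacao computes the elongation of simple harmonic motion, x(t) = A*cos(ω*t + ϕ); the versions above originally multiplied the phase by t and left ω unused, which the fix corrects. A quick worked check with assumed sample values:

import math as m

def calcula_elongacao(A, phi, omega, t):
    # x(t) = A * cos(omega * t + phi)
    return A * m.cos(omega * t + phi)

# assumed values: A = 2, phi = 0, omega = pi rad/s
print(calcula_elongacao(2.0, 0.0, m.pi, 0.0))  # 2.0, maximum elongation at t = 0
print(calcula_elongacao(2.0, 0.0, m.pi, 1.0))  # -2.0, half a period later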
import renderdoc as rd
from typing import List
import rdtest
class D3D12_Resource_Mapping_Zoo(rdtest.TestCase):
demos_test_name = 'D3D12_Resource_Mapping_Zoo'
def test_debug_pixel(self, x, y, test_name):
pipe: rd.PipeState = self.controller.GetPipelineState()
if not pipe.GetShaderReflection(rd.ShaderStage.Pixel).debugInfo.debuggable:
rdtest.log.print("Skipping undebuggable shader at {}.".format(test_name))
            return True  # treat an undebuggable shader as a skip, not a failure
# Debug the shader
trace: rd.ShaderDebugTrace = self.controller.DebugPixel(x, y, rd.ReplayController.NoPreference,
rd.ReplayController.NoPreference)
cycles, variables = self.process_trace(trace)
output = self.find_output_source_var(trace, rd.ShaderBuiltin.ColorOutput, 0)
debugged = self.evaluate_source_var(output, variables)
try:
self.check_pixel_value(pipe.GetOutputTargets()[0].resourceId, x, y, debugged.value.f32v[0:4])
except rdtest.TestFailureException as ex:
rdtest.log.error("Test {} did not match. {}".format(test_name, str(ex)))
return False
finally:
self.controller.FreeTrace(trace)
rdtest.log.success("Test {} matched as expected".format(test_name))
return True
def check_capture(self):
if not self.controller.GetAPIProperties().shaderDebugging:
rdtest.log.success("Shader debugging not enabled, skipping test")
return
failed = False
test_marker: rd.ActionDescription = self.find_action("sm_5_0")
action = test_marker.next
self.controller.SetFrameEvent(action.eventId, False)
failed = not self.test_debug_pixel(200, 200, "sm_5_0") or failed
test_marker: rd.ActionDescription = self.find_action("sm_5_1")
action = test_marker.next
self.controller.SetFrameEvent(action.eventId, False)
failed = not self.test_debug_pixel(200, 200, "sm_5_1") or failed
rdtest.log.begin_section("Resource array tests")
test_marker: rd.ActionDescription = self.find_action("ResArray")
action = test_marker.next
self.controller.SetFrameEvent(action.eventId, False)
for y in range(4):
for x in range(4):
failed = not self.test_debug_pixel(200 + x, 200 + y, "ResArray({},{})".format(x, y)) or failed
rdtest.log.end_section("Resource array tests")
rdtest.log.begin_section("Bindless tests")
test_marker: rd.ActionDescription = self.find_action("Bindless")
action = test_marker.next
self.controller.SetFrameEvent(action.eventId, False)
for y in range(4):
for x in range(4):
failed = not self.test_debug_pixel(200 + x, 200 + y, "Bindless({},{})".format(x, y)) or failed
rdtest.log.end_section("Bindless tests")
if failed:
raise rdtest.TestFailureException("Some tests were not as expected")
rdtest.log.success("All tests matched")
|
normal
|
{
"blob_id": "565888d771f53934805555390e48d4886a43bdb6",
"index": 189,
"step-1": "<mask token>\n\n\nclass D3D12_Resource_Mapping_Zoo(rdtest.TestCase):\n <mask token>\n <mask token>\n\n def check_capture(self):\n if not self.controller.GetAPIProperties().shaderDebugging:\n rdtest.log.success('Shader debugging not enabled, skipping test')\n return\n failed = False\n test_marker: rd.ActionDescription = self.find_action('sm_5_0')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n failed = not self.test_debug_pixel(200, 200, 'sm_5_0') or failed\n test_marker: rd.ActionDescription = self.find_action('sm_5_1')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n failed = not self.test_debug_pixel(200, 200, 'sm_5_1') or failed\n rdtest.log.begin_section('Resource array tests')\n test_marker: rd.ActionDescription = self.find_action('ResArray')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n for y in range(4):\n for x in range(4):\n failed = not self.test_debug_pixel(200 + x, 200 + y,\n 'ResArray({},{})'.format(x, y)) or failed\n rdtest.log.end_section('Resource array tests')\n rdtest.log.begin_section('Bindless tests')\n test_marker: rd.ActionDescription = self.find_action('Bindless')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n for y in range(4):\n for x in range(4):\n failed = not self.test_debug_pixel(200 + x, 200 + y,\n 'Bindless({},{})'.format(x, y)) or failed\n rdtest.log.end_section('Bindless tests')\n if failed:\n raise rdtest.TestFailureException('Some tests were not as expected'\n )\n rdtest.log.success('All tests matched')\n",
"step-2": "<mask token>\n\n\nclass D3D12_Resource_Mapping_Zoo(rdtest.TestCase):\n <mask token>\n\n def test_debug_pixel(self, x, y, test_name):\n pipe: rd.PipeState = self.controller.GetPipelineState()\n if not pipe.GetShaderReflection(rd.ShaderStage.Pixel\n ).debugInfo.debuggable:\n rdtest.log.print('Skipping undebuggable shader at {}.'.format(\n test_name))\n return\n trace: rd.ShaderDebugTrace = self.controller.DebugPixel(x, y, rd.\n ReplayController.NoPreference, rd.ReplayController.NoPreference)\n cycles, variables = self.process_trace(trace)\n output = self.find_output_source_var(trace, rd.ShaderBuiltin.\n ColorOutput, 0)\n debugged = self.evaluate_source_var(output, variables)\n try:\n self.check_pixel_value(pipe.GetOutputTargets()[0].resourceId, x,\n y, debugged.value.f32v[0:4])\n except rdtest.TestFailureException as ex:\n rdtest.log.error('Test {} did not match. {}'.format(test_name,\n str(ex)))\n return False\n finally:\n self.controller.FreeTrace(trace)\n rdtest.log.success('Test {} matched as expected'.format(test_name))\n return True\n\n def check_capture(self):\n if not self.controller.GetAPIProperties().shaderDebugging:\n rdtest.log.success('Shader debugging not enabled, skipping test')\n return\n failed = False\n test_marker: rd.ActionDescription = self.find_action('sm_5_0')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n failed = not self.test_debug_pixel(200, 200, 'sm_5_0') or failed\n test_marker: rd.ActionDescription = self.find_action('sm_5_1')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n failed = not self.test_debug_pixel(200, 200, 'sm_5_1') or failed\n rdtest.log.begin_section('Resource array tests')\n test_marker: rd.ActionDescription = self.find_action('ResArray')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n for y in range(4):\n for x in range(4):\n failed = not self.test_debug_pixel(200 + x, 200 + y,\n 'ResArray({},{})'.format(x, y)) or failed\n rdtest.log.end_section('Resource array tests')\n rdtest.log.begin_section('Bindless tests')\n test_marker: rd.ActionDescription = self.find_action('Bindless')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n for y in range(4):\n for x in range(4):\n failed = not self.test_debug_pixel(200 + x, 200 + y,\n 'Bindless({},{})'.format(x, y)) or failed\n rdtest.log.end_section('Bindless tests')\n if failed:\n raise rdtest.TestFailureException('Some tests were not as expected'\n )\n rdtest.log.success('All tests matched')\n",
"step-3": "<mask token>\n\n\nclass D3D12_Resource_Mapping_Zoo(rdtest.TestCase):\n demos_test_name = 'D3D12_Resource_Mapping_Zoo'\n\n def test_debug_pixel(self, x, y, test_name):\n pipe: rd.PipeState = self.controller.GetPipelineState()\n if not pipe.GetShaderReflection(rd.ShaderStage.Pixel\n ).debugInfo.debuggable:\n rdtest.log.print('Skipping undebuggable shader at {}.'.format(\n test_name))\n return\n trace: rd.ShaderDebugTrace = self.controller.DebugPixel(x, y, rd.\n ReplayController.NoPreference, rd.ReplayController.NoPreference)\n cycles, variables = self.process_trace(trace)\n output = self.find_output_source_var(trace, rd.ShaderBuiltin.\n ColorOutput, 0)\n debugged = self.evaluate_source_var(output, variables)\n try:\n self.check_pixel_value(pipe.GetOutputTargets()[0].resourceId, x,\n y, debugged.value.f32v[0:4])\n except rdtest.TestFailureException as ex:\n rdtest.log.error('Test {} did not match. {}'.format(test_name,\n str(ex)))\n return False\n finally:\n self.controller.FreeTrace(trace)\n rdtest.log.success('Test {} matched as expected'.format(test_name))\n return True\n\n def check_capture(self):\n if not self.controller.GetAPIProperties().shaderDebugging:\n rdtest.log.success('Shader debugging not enabled, skipping test')\n return\n failed = False\n test_marker: rd.ActionDescription = self.find_action('sm_5_0')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n failed = not self.test_debug_pixel(200, 200, 'sm_5_0') or failed\n test_marker: rd.ActionDescription = self.find_action('sm_5_1')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n failed = not self.test_debug_pixel(200, 200, 'sm_5_1') or failed\n rdtest.log.begin_section('Resource array tests')\n test_marker: rd.ActionDescription = self.find_action('ResArray')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n for y in range(4):\n for x in range(4):\n failed = not self.test_debug_pixel(200 + x, 200 + y,\n 'ResArray({},{})'.format(x, y)) or failed\n rdtest.log.end_section('Resource array tests')\n rdtest.log.begin_section('Bindless tests')\n test_marker: rd.ActionDescription = self.find_action('Bindless')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n for y in range(4):\n for x in range(4):\n failed = not self.test_debug_pixel(200 + x, 200 + y,\n 'Bindless({},{})'.format(x, y)) or failed\n rdtest.log.end_section('Bindless tests')\n if failed:\n raise rdtest.TestFailureException('Some tests were not as expected'\n )\n rdtest.log.success('All tests matched')\n",
"step-4": "import renderdoc as rd\nfrom typing import List\nimport rdtest\n\n\nclass D3D12_Resource_Mapping_Zoo(rdtest.TestCase):\n demos_test_name = 'D3D12_Resource_Mapping_Zoo'\n\n def test_debug_pixel(self, x, y, test_name):\n pipe: rd.PipeState = self.controller.GetPipelineState()\n if not pipe.GetShaderReflection(rd.ShaderStage.Pixel\n ).debugInfo.debuggable:\n rdtest.log.print('Skipping undebuggable shader at {}.'.format(\n test_name))\n return\n trace: rd.ShaderDebugTrace = self.controller.DebugPixel(x, y, rd.\n ReplayController.NoPreference, rd.ReplayController.NoPreference)\n cycles, variables = self.process_trace(trace)\n output = self.find_output_source_var(trace, rd.ShaderBuiltin.\n ColorOutput, 0)\n debugged = self.evaluate_source_var(output, variables)\n try:\n self.check_pixel_value(pipe.GetOutputTargets()[0].resourceId, x,\n y, debugged.value.f32v[0:4])\n except rdtest.TestFailureException as ex:\n rdtest.log.error('Test {} did not match. {}'.format(test_name,\n str(ex)))\n return False\n finally:\n self.controller.FreeTrace(trace)\n rdtest.log.success('Test {} matched as expected'.format(test_name))\n return True\n\n def check_capture(self):\n if not self.controller.GetAPIProperties().shaderDebugging:\n rdtest.log.success('Shader debugging not enabled, skipping test')\n return\n failed = False\n test_marker: rd.ActionDescription = self.find_action('sm_5_0')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n failed = not self.test_debug_pixel(200, 200, 'sm_5_0') or failed\n test_marker: rd.ActionDescription = self.find_action('sm_5_1')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n failed = not self.test_debug_pixel(200, 200, 'sm_5_1') or failed\n rdtest.log.begin_section('Resource array tests')\n test_marker: rd.ActionDescription = self.find_action('ResArray')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n for y in range(4):\n for x in range(4):\n failed = not self.test_debug_pixel(200 + x, 200 + y,\n 'ResArray({},{})'.format(x, y)) or failed\n rdtest.log.end_section('Resource array tests')\n rdtest.log.begin_section('Bindless tests')\n test_marker: rd.ActionDescription = self.find_action('Bindless')\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n for y in range(4):\n for x in range(4):\n failed = not self.test_debug_pixel(200 + x, 200 + y,\n 'Bindless({},{})'.format(x, y)) or failed\n rdtest.log.end_section('Bindless tests')\n if failed:\n raise rdtest.TestFailureException('Some tests were not as expected'\n )\n rdtest.log.success('All tests matched')\n",
"step-5": "import renderdoc as rd\nfrom typing import List\nimport rdtest\n\n\nclass D3D12_Resource_Mapping_Zoo(rdtest.TestCase):\n demos_test_name = 'D3D12_Resource_Mapping_Zoo'\n\n def test_debug_pixel(self, x, y, test_name):\n pipe: rd.PipeState = self.controller.GetPipelineState()\n\n if not pipe.GetShaderReflection(rd.ShaderStage.Pixel).debugInfo.debuggable:\n rdtest.log.print(\"Skipping undebuggable shader at {}.\".format(test_name))\n return\n\n # Debug the shader\n trace: rd.ShaderDebugTrace = self.controller.DebugPixel(x, y, rd.ReplayController.NoPreference,\n rd.ReplayController.NoPreference)\n\n cycles, variables = self.process_trace(trace)\n\n output = self.find_output_source_var(trace, rd.ShaderBuiltin.ColorOutput, 0)\n\n debugged = self.evaluate_source_var(output, variables)\n\n try:\n self.check_pixel_value(pipe.GetOutputTargets()[0].resourceId, x, y, debugged.value.f32v[0:4])\n except rdtest.TestFailureException as ex:\n rdtest.log.error(\"Test {} did not match. {}\".format(test_name, str(ex)))\n return False\n finally:\n self.controller.FreeTrace(trace)\n\n rdtest.log.success(\"Test {} matched as expected\".format(test_name))\n return True\n\n def check_capture(self):\n if not self.controller.GetAPIProperties().shaderDebugging:\n rdtest.log.success(\"Shader debugging not enabled, skipping test\")\n return\n\n failed = False\n\n test_marker: rd.ActionDescription = self.find_action(\"sm_5_0\")\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n failed = not self.test_debug_pixel(200, 200, \"sm_5_0\") or failed\n\n test_marker: rd.ActionDescription = self.find_action(\"sm_5_1\")\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n failed = not self.test_debug_pixel(200, 200, \"sm_5_1\") or failed\n\n rdtest.log.begin_section(\"Resource array tests\")\n test_marker: rd.ActionDescription = self.find_action(\"ResArray\")\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n\n for y in range(4):\n for x in range(4):\n failed = not self.test_debug_pixel(200 + x, 200 + y, \"ResArray({},{})\".format(x, y)) or failed\n\n rdtest.log.end_section(\"Resource array tests\")\n\n rdtest.log.begin_section(\"Bindless tests\")\n test_marker: rd.ActionDescription = self.find_action(\"Bindless\")\n action = test_marker.next\n self.controller.SetFrameEvent(action.eventId, False)\n\n for y in range(4):\n for x in range(4):\n failed = not self.test_debug_pixel(200 + x, 200 + y, \"Bindless({},{})\".format(x, y)) or failed\n\n rdtest.log.end_section(\"Bindless tests\")\n\n if failed:\n raise rdtest.TestFailureException(\"Some tests were not as expected\")\n\n rdtest.log.success(\"All tests matched\")\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
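check_capture above folds each sub-test into a failed flag with "failed = not self.test_debug_pixel(...) or failed", so every test still runs after a failure and the exception is raised once at the end. A minimal sketch of that accumulation pattern, detached from the renderdoc API:

def run_all(tests):
    failed = False
    for name, fn in tests:
        ok = fn()
        failed = not ok or failed  # fold the result in; earlier failures never short-circuit later tests
        print(name, 'passed' if ok else 'failed')
    if failed:
        raise RuntimeError('some tests were not as expected')

run_all([('first', lambda: True), ('second', lambda: True)])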
__author__ = 'christopher'
import fabio
import pyFAI
import matplotlib.pyplot as plt
from matplotlib.colors import LogNorm
from pims.tiff_stack import TiffStack_tifffile as TiffStack
from skxray.io.save_powder_output import save_output
from xpd_workflow.mask_tools import *
geo = pyFAI.load(
'/mnt/bulk-data/research_data/USC_beamtime/08-05-2015/2015-08-05/Ni_STD/Ni_PDF_60s-00000.poni')
dq = geo.deltaQ((2048, 2048))
q = geo.qArray((2048, 2048))
bins = 8000
# plt.imshow(dq)
# plt.show()
# AAA
# dq_mean = sts.binned_statistic(q.ravel(), dq.ravel(), bins=bins,
# range=[0, q.max()], statistic='mean')
# dq_median = sts.binned_statistic(q.ravel(), dq.ravel(), bins=bi
# range=[0, q.max()], statistic='median')
# plt.plot(dq_mean[1][:-1], dq_mean[0])
# plt.plot(dq_median[1][:-1], dq_median[0])
# plt.show()
r = geo.qArray((2048, 2048))
nr = r / np.max(r)
img = np.sin(nr * np.pi * 3) * np.exp(-10 * nr)
ideal_img = dc(img)
smax = np.max(img)
smin = np.min(img)
bad_pixels = []
'''
for i in xrange(np.random.randint(1000, 2000)):
x, y = np.random.randint(0, 2048), np.random.randint(0, 2048)
if np.random.random() >= .5:
img[x, y] = smax * 3
else:
img[x, y] = smin * 3
bad_pixels.append([x, y])
'''
plt.imshow(img, vmin=smin, vmax=smax)
plt.show()
# plt.imshow(idsr - dsr)
# plt.show()
# ideal_median = sts.binned_statistic(q.ravel(), ideal_img.ravel(), bins=bins,
# range=[0, q.max()], statistic='median')
#
# ideal_mean = sts.binned_statistic(q.ravel(), ideal_img.ravel(), bins=bins,
# range=[0, q.max()], statistic='mean')
# ideal_std = sts.binned_statistic(q.ravel(), ideal_img.ravel(), bins=bins,
# range=[0, q.max()], statistic=np.std)
# median = sts.binned_statistic(q.ravel(), img.ravel(), bins=bins,
# range=[0, q.max()], statistic='median')
#
# mean = sts.binned_statistic(q.ravel(), img.ravel(), bins=bins,
# range=[0, q.max()], statistic='mean')
# std = sts.binned_statistic(q.ravel(), img.ravel(), bins=bins,
# range=[0, q.max()], statistic=np.std)
# plt.plot(ideal_mean[1][:-1], ideal_mean[0], label='ideal mean')
# plt.plot(ideal_median[1][:-1], ideal_median[0], label='ideal median')
# plt.plot(ideal_std[1][:-1], ideal_std[0], label='ideal std')
# plt.legend()
# plt.show()
# plt.plot(mean[1][:-1], mean[0], label='mean')
# plt.plot(median[1][:-1], median[0], label='median')
# # plt.plot(std[1][:-1], std[0], label='ideal std')
# plt.legend()
# plt.show()
perfect_mask = (img - ideal_img) != 0
for i in [10,
# 9, 8, 7, 6, 5, 4.5, 4
]:
rbmsk = ring_blur_mask(img, geo, i)
    print(i)
    print('good mask', np.sum(perfect_mask == rbmsk))
    print('under masked', np.sum(perfect_mask > rbmsk))
    print('over masked', np.sum(perfect_mask < rbmsk))
    print()
# '''
plt.imshow(img, interpolation='none', origin='lower', aspect='auto')
for y, x in bad_pixels:
plt.plot(x, y, 'ro', mfc='r', mec='r', ms=10)
for y, x in zip(
np.where(rbmsk != 0)[0],
np.where(rbmsk != 0)[1]
):
plt.plot(x, y, 'go', mfc='g', mec='g', ms=5)
plt.show()
# '''
print(q[1907, 173], q[173, 1907])
_, hist_bins, _ = plt.hist(img[np.where((q > 313.) & (q < 314.))], bins=50)
plt.axvline(np.mean(img[np.where((q > 313.) & (q < 314.))]), color='r')
plt.axvline(np.mean(img[np.where((q > 313.) & (q < 314.))]) + np.std(img[np.where((q > 313.) & (q < 314.))]))
plt.axvline(np.mean(img[np.where((q > 313.) & (q < 314.))]) - np.std(img[np.where((q > 313.) & (q < 314.))]))
# plt.hist(img[np.where((q > 287.) & (q < 288.) & (rbmsk != 1))],
# bins=50
# bins=hist_bins
# )
plt.show()
'''
mr = dc(q)
mr[rbmsk.astype(bool)] = -1
msk_median = sts.binned_statistic(mr.ravel(), img.ravel(), bins=bins,
range=[0, mr.max()], statistic='median')
msk_mean = sts.binned_statistic(mr.ravel(), img.ravel(), bins=bins,
range=[0, mr.max()], statistic='mean')
msk_std = sts.binned_statistic(mr.ravel(), img.ravel(), bins=bins,
range=[0, mr.max()], statistic=np.std)
plt.plot(msk_mean[1][:-1], msk_mean[0], label='mean')
plt.plot(msk_median[1][:-1], msk_median[0], label='median')
# plt.plot(std[1][:-1], std[0], label='ideal std')
plt.legend()
plt.show()
# '''
|
normal
|
{
"blob_id": "50f6bcb4d2223d864cca92778ab3483a2d2c3214",
"index": 5283,
"step-1": "__author__ = 'christopher'\nimport fabio\nimport pyFAI\nimport matplotlib.pyplot as plt\nfrom matplotlib.colors import LogNorm\nfrom pims.tiff_stack import TiffStack_tifffile as TiffStack\nfrom skxray.io.save_powder_output import save_output\nfrom xpd_workflow.mask_tools import *\n\ngeo = pyFAI.load(\n '/mnt/bulk-data/research_data/USC_beamtime/08-05-2015/2015-08-05/Ni_STD/Ni_PDF_60s-00000.poni')\ndq = geo.deltaQ((2048, 2048))\nq = geo.qArray((2048, 2048))\nbins = 8000\n# plt.imshow(dq)\n# plt.show()\n# AAA\n# dq_mean = sts.binned_statistic(q.ravel(), dq.ravel(), bins=bins,\n# range=[0, q.max()], statistic='mean')\n# dq_median = sts.binned_statistic(q.ravel(), dq.ravel(), bins=bi\n# range=[0, q.max()], statistic='median')\n# plt.plot(dq_mean[1][:-1], dq_mean[0])\n# plt.plot(dq_median[1][:-1], dq_median[0])\n# plt.show()\nr = geo.qArray((2048, 2048))\nnr = r / np.max(r)\n\nimg = np.sin(nr * np.pi * 3) * np.exp(-10 * nr)\nideal_img = dc(img)\nsmax = np.max(img)\nsmin = np.min(img)\nbad_pixels = []\n'''\nfor i in xrange(np.random.randint(1000, 2000)):\n x, y = np.random.randint(0, 2048), np.random.randint(0, 2048)\n if np.random.random() >= .5:\n img[x, y] = smax * 3\n else:\n img[x, y] = smin * 3\n bad_pixels.append([x, y])\n'''\nplt.imshow(img, vmin=smin, vmax=smax)\nplt.show()\n\n# plt.imshow(idsr - dsr)\n# plt.show()\n# ideal_median = sts.binned_statistic(q.ravel(), ideal_img.ravel(), bins=bins,\n# range=[0, q.max()], statistic='median')\n#\n# ideal_mean = sts.binned_statistic(q.ravel(), ideal_img.ravel(), bins=bins,\n# range=[0, q.max()], statistic='mean')\n# ideal_std = sts.binned_statistic(q.ravel(), ideal_img.ravel(), bins=bins,\n# range=[0, q.max()], statistic=np.std)\n\n\n# median = sts.binned_statistic(q.ravel(), img.ravel(), bins=bins,\n# range=[0, q.max()], statistic='median')\n#\n# mean = sts.binned_statistic(q.ravel(), img.ravel(), bins=bins,\n# range=[0, q.max()], statistic='mean')\n# std = sts.binned_statistic(q.ravel(), img.ravel(), bins=bins,\n# range=[0, q.max()], statistic=np.std)\n\n# plt.plot(ideal_mean[1][:-1], ideal_mean[0], label='ideal mean')\n# plt.plot(ideal_median[1][:-1], ideal_median[0], label='ideal median')\n# plt.plot(ideal_std[1][:-1], ideal_std[0], label='ideal std')\n# plt.legend()\n# plt.show()\n\n# plt.plot(mean[1][:-1], mean[0], label='mean')\n# plt.plot(median[1][:-1], median[0], label='median')\n# # plt.plot(std[1][:-1], std[0], label='ideal std')\n# plt.legend()\n# plt.show()\n\nperfect_mask = (img - ideal_img) != 0\nfor i in [10,\n # 9, 8, 7, 6, 5, 4.5, 4\n ]:\n rbmsk = ring_blur_mask(img, geo, i)\n print i\n print 'good mask', np.sum(perfect_mask == rbmsk)\n print 'under masked', np.sum(perfect_mask > rbmsk)\n print 'over masked', np.sum(perfect_mask < rbmsk)\n print\n# '''\nplt.imshow(img, interpolation='none', origin='lower', aspect='auto')\nfor y, x in bad_pixels:\n plt.plot(x, y, 'ro', mfc='r', mec='r', ms=10)\nfor y, x in zip(\n np.where(rbmsk != 0)[0],\n np.where(rbmsk != 0)[1]\n):\n plt.plot(x, y, 'go', mfc='g', mec='g', ms=5)\nplt.show()\n# '''\nprint q[1907, 173], q[173, 1907]\n\n_, hist_bins, _ = plt.hist(img[np.where((q > 313.) & (q < 314.))], bins=50)\nplt.axvline(np.mean(img[np.where((q > 313.) & (q < 314.))]), color='r')\nplt.axvline(np.mean(img[np.where((q > 313.) & (q < 314.))]) + np.std(img[np.where((q > 313.) & (q < 314.))]))\nplt.axvline(np.mean(img[np.where((q > 313.) & (q < 314.))]) - np.std(img[np.where((q > 313.) & (q < 314.))]))\n# plt.hist(img[np.where((q > 287.) & (q < 288.) 
& (rbmsk != 1))],\n # bins=50\n # bins=hist_bins\n # )\nplt.show()\n'''\nmr = dc(q)\nmr[rbmsk.astype(bool)] = -1\n\nmsk_median = sts.binned_statistic(mr.ravel(), img.ravel(), bins=bins,\n range=[0, mr.max()], statistic='median')\nmsk_mean = sts.binned_statistic(mr.ravel(), img.ravel(), bins=bins,\n range=[0, mr.max()], statistic='mean')\nmsk_std = sts.binned_statistic(mr.ravel(), img.ravel(), bins=bins,\n range=[0, mr.max()], statistic=np.std)\n\nplt.plot(msk_mean[1][:-1], msk_mean[0], label='mean')\nplt.plot(msk_median[1][:-1], msk_median[0], label='median')\n# plt.plot(std[1][:-1], std[0], label='ideal std')\nplt.legend()\nplt.show()\n# '''\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import logging
import os

import callbacks
import commands
import dice
import echo
import inline
import keyboards
import mybot
import myenigma
import poll
import rocketgram
import send
import unknown


# avoid to remove "unused" imports by optimizers
def fix_imports():
    _ = callbacks
    _ = commands
    _ = echo
    _ = keyboards
    _ = myenigma
    _ = inline
    _ = send
    _ = dice
    _ = unknown
    _ = poll


logger = logging.getLogger('minibots.engine')


def main():
    mode = os.environ.get('MODE')
    if mode is None and 'DYNO' in os.environ:
        mode = 'heroku'

    if mode not in ('updates', 'webhook', 'heroku'):
        raise TypeError('MODE must be `updates` or `webhook` or `heroku`!')

    logging.basicConfig(format='%(asctime)s - %(levelname)-5s - %(name)-25s: %(message)s')
    logging.basicConfig(level=logging.ERROR)
    logging.getLogger('engine').setLevel(logging.INFO)
    logging.getLogger('mybot').setLevel(logging.DEBUG)
    logging.getLogger('rocketgram').setLevel(logging.DEBUG)
    logging.getLogger('rocketgram.raw.in').setLevel(logging.INFO)
    logging.getLogger('rocketgram.raw.out').setLevel(logging.INFO)

    logger.info('Starting bot''s template in %s...', mode)

    bot = mybot.get_bot(os.environ['TOKEN'].strip())

    if mode == 'updates':
        rocketgram.UpdatesExecutor.run(bot, drop_updates=bool(int(os.environ.get('DROP_UPDATES', 0))))
    else:
        port = int(os.environ['PORT']) if mode == 'heroku' else int(os.environ.get('WEBHOOK_PORT', 8080))
        rocketgram.AioHttpExecutor.run(bot,
                                       os.environ['WEBHOOK_URL'].strip(),
                                       os.environ.get('WEBHOOK_PATH', '/').strip(),
                                       host='0.0.0.0', port=port,
                                       drop_updates=bool(int(os.environ.get('DROP_UPDATES', 0))),
                                       webhook_remove=not mode == 'heroku')

    logger.info('Bye!')


if __name__ == '__main__':
    main()
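
# A minimal way to exercise this entry point locally (assuming the file is
# saved as main.py and TOKEN holds a valid bot token; the value below is a
# placeholder, not a real token):
#
#   MODE=updates TOKEN=123456:ABC... python main.py
#
# Webhook and Heroku modes additionally read WEBHOOK_URL, WEBHOOK_PATH and
# PORT/WEBHOOK_PORT, as handled in main() above.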
|
normal
|
{
"blob_id": "fd904c70b350c650362c55ccb3b915371f24e267",
"index": 9623,
"step-1": "import logging\nimport os\n\nimport callbacks\nimport commands\nimport dice\nimport echo\nimport inline\nimport keyboards\nimport mybot\nimport myenigma\nimport poll\nimport rocketgram\nimport send\nimport unknown\n\n\n# avoid to remove \"unused\" imports by optimizers\ndef fix_imports():\n _ = callbacks\n _ = commands\n _ = echo\n _ = keyboards\n _ = myenigma\n _ = inline\n _ = send\n _ = dice\n _ = unknown\n _ = poll\n\n\nlogger = logging.getLogger('minibots.engine')\n\n\ndef main():\n mode = os.environ.get('MODE')\n if mode is None and 'DYNO' in os.environ:\n mode = 'heroku'\n\n if mode not in ('updates', 'webhook', 'heroku'):\n raise TypeError('MODE must be `updates` or `webhook` or `heroku`!')\n\n logging.basicConfig(format='%(asctime)s - %(levelname)-5s - %(name)-25s: %(message)s')\n logging.basicConfig(level=logging.ERROR)\n logging.getLogger('engine').setLevel(logging.INFO)\n logging.getLogger('mybot').setLevel(logging.DEBUG)\n logging.getLogger('rocketgram').setLevel(logging.DEBUG)\n logging.getLogger('rocketgram.raw.in').setLevel(logging.INFO)\n logging.getLogger('rocketgram.raw.out').setLevel(logging.INFO)\n\n logger.info('Starting bot''s template in %s...', mode)\n\n bot = mybot.get_bot(os.environ['TOKEN'].strip())\n\n if mode == 'updates':\n rocketgram.UpdatesExecutor.run(bot, drop_updates=bool(int(os.environ.get('DROP_UPDATES', 0))))\n else:\n port = int(os.environ['PORT']) if mode == 'heroku' else int(os.environ.get('WEBHOOK_PORT', 8080))\n rocketgram.AioHttpExecutor.run(bot,\n os.environ['WEBHOOK_URL'].strip(),\n os.environ.get('WEBHOOK_PATH', '/').strip(),\n host='0.0.0.0', port=port,\n drop_updates=bool(int(os.environ.get('DROP_UPDATES', 0))),\n webhook_remove=not mode == 'heroku')\n\n logger.info('Bye!')\n\n\nif __name__ == '__main__':\n main()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
def calculate_data_list():
counter = 0
btc = 'BTC'
symbols = []
all_positions = []
positions_final = []
volume = []
c = []
price_change = []
data = client.get_ticker()
for x in range(len(data)):
if btc in data[x]['symbol'] and data[x]['symbol'
] != 'BTCUSDT' and data[x]['symbol'] != 'VENBTC':
if float(data[x]['quoteVolume']) > 100:
all_positions.append(x)
for x in all_positions:
c.append(float(data[x]['priceChangePercent']))
i = sorted(range(len(c)), key=lambda k: c[k])
i.reverse()
while len(positions_final) < 20 and len(positions_final) < len(
all_positions):
symbols.append(data[all_positions[i[counter]]]['symbol'])
positions_final.append(all_positions[i[counter]])
volume.append(data[all_positions[i[counter]]]['quoteVolume'])
price_change.append(data[all_positions[i[counter]]][
'priceChangePercent'])
counter += 1
return symbols, volume, positions_final, price_change
def get_kline():
symbols, volume, pozitii, price_change = calculate_data_list()
prices = []
prices1 = []
k = []
for x in symbols:
try:
order = client.get_klines(symbol=x, interval='1m')
except BinanceAPIException as e:
print(e.status_code)
print(e.message)
try:
order1 = client.get_klines(symbol=x, limit=1000, interval='15m')
except BinanceAPIException as e:
print(e.status_code)
print(e.message)
if len(order1) < 970:
a = symbols.index(x)
k.append(a)
else:
prices.append([])
prices1.append([])
for i in range(len(order)):
prices[-1].append(float(order[i][1]))
for i in range(len(order1)):
prices1[-1].append(float(order1[i][1]))
k.reverse()
for x in k:
symbols.pop(x)
volume.pop(x)
pozitii.pop(x)
price_change.pop(x)
return symbols, volume, pozitii, prices, prices1, price_change
def process_depth(msg):
sums5 = 0
sumb5 = 0
m = -1
for x in range(5):
if float(msg['data']['bids'][x][1]) > m:
m = float(msg['data']['bids'][x][1])
sums5 = sums5 + float(msg['data']['bids'][x][1])
sumb5 = sumb5 + float(msg['data']['asks'][x][1])
ratio1 = sums5 / sumb5
if ratio1 < 1:
ratio1 = 1 / ratio1 * -1 + 1
else:
ratio1 -= 1
sums20 = 0
sumb20 = 0
ratio2 = 0
try:
for x in range(17):
sums20 = sums20 + float(msg['data']['bids'][x][1])
sumb20 = sumb20 + float(msg['data']['asks'][x][1])
ratio2 = sums20 / sumb20
if ratio2 < 1:
ratio2 = 1 / ratio2 * -1 + 1
else:
ratio2 -= 1
except Exception as e:
print('')
for i in range(len(symbols)):
simbol = symbols[i].lower() + '@depth20'
if simbol == msg['stream']:
ratio5[i] = round(ratio1, 2)
ratio20[i] = round(ratio2, 2)
max_order5[i] = m
ratio5_sum[i] = round(float(sums5) * float(current_price[i]) *
100 / float(volume[i]), 2)
current_price[i] = float(msg['data']['bids'][0][0])
<|reserved_special_token_0|>
def kline_continuum():
i = 0
while True:
time.sleep(60)
for x in range(len(symbols)):
k_line_1m[x].pop(0)
k_line_1m[x].append(current_price[x])
if i % 15 == 0:
k_line_15m[x].pop(0)
k_line_15m[x].append(current_price[x])
i += 1
<|reserved_special_token_0|>
def calculate_score():
for x in range(len(symbols)):
score = 0
a = float(price_chance_2_min[x])
if a > 0 and a < 0.5:
score += 1
elif a >= 0.5 and a < 1:
score += 1.25
elif a >= 1 and a < 1.5:
score += 1.5
elif a >= 1.5 and a < 2:
score += 0.5
elif a >= 3:
score += 0.25
a = float(price_chance_5_min[x])
if a > 0 and a < 0.5:
score += 1
elif a >= 0.5 and a < 1:
score += 1.25
elif a >= 1 and a < 2:
score += 1.5
elif a >= 2 and a < 3:
score += 0.5
elif a >= 3:
score += 0.25
a = float(price_chance_15_min[x])
if a <= 1 and a > -0.5:
score += 0.25
elif a <= -0.5 and a > -1:
score += 0.5
elif a <= -1 and a > -1.5:
score += 0.75
elif a <= -1.5:
score += 1
a = float(price_change_25_30_min[x])
if a <= 2 and a > -0.75:
score += 0.25
elif a <= -0.75 and a > -1.25:
score += 0.5
elif a <= -1.25 and a > -1.75:
score += 0.75
elif a <= -1.75:
score += 1
a = float(price_chance_1_hour[x])
if a <= 2 and a >= 0:
score += 0.5
elif a <= 0 and a > -2:
score += 0.75
elif a <= -2:
score += 1
a = float(price_chance_3_hour[x])
if a <= 5 and a > -1:
score += 0.25
elif a <= -1 and a > -3:
score += 0.5
elif a <= -3 and a > -6:
score += 0.75
elif a <= -6:
score += 1
a = float(price_chance_8_hour[x])
if a <= 0 and a > -4:
score += 0.25
elif a <= -4 and a > -6:
score += 0.5
elif a <= -6:
score += 0.75
if float(ratio5[x]) > 0:
score += 1
a = 0
for i in range(len(ratio5_10sec[x])):
if float(price_chance_2_min[x]) > 0.55 or float(price_chance_5_min
[x]) > 1:
if float(ratio5_10sec[x][i]) > 0:
a += 1
if float(ratio5_sum_10sec[x][i]) > 0.3:
a += 1
score += a / len(ratio5_sum_10sec[x])
if float(ratio20[x]) > 0:
score += 1
a = 0
for i in range(len(ratio5_10sec[x]) - 1):
if float(ratio5_10sec[x][i]) > 0:
a += 1
if a <= 2:
score += 0.25
elif a > 2 and a <= 4:
score += 0.5
elif a > 4 and a <= 7:
score += 0.75
elif a > 7:
score += 1
a = 0
for i in range(20, 1, -1):
if float(k_line_1m[x][-i]) > float(k_line_1m[x][-(i - 1)]):
a += 1
score += a / 10
if float(price_change_1_days[x]) > 5:
score += 0.3
if float(price_change_3_days[x]) > 10:
score += 0.25
if float(price_change_5_days[x]) > 15:
score += 0.25
if float(price_change_7_days[x]) > 20:
score += 0.25
if float(price_change_10_days[x]) > -25:
score += 0.25
a = float(average_change_10_min[x])
if a < 0.2 and a > -0.3:
score += 0.1
a = float(average_change_20_min[x])
if a < 0.2 and a > -0.3:
score += 0.1
a = float(average_change_50_min[x])
if a < 0.2 and a > -0.3:
score += 0.1
a = float(average_change_100_min[x])
if a < 0.2 and a > -0.3:
score += 0.1
total_score[x] = score
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def calculate_data_list():
counter = 0
btc = 'BTC'
symbols = []
all_positions = []
positions_final = []
volume = []
c = []
price_change = []
data = client.get_ticker()
for x in range(len(data)):
if btc in data[x]['symbol'] and data[x]['symbol'
] != 'BTCUSDT' and data[x]['symbol'] != 'VENBTC':
if float(data[x]['quoteVolume']) > 100:
all_positions.append(x)
for x in all_positions:
c.append(float(data[x]['priceChangePercent']))
i = sorted(range(len(c)), key=lambda k: c[k])
i.reverse()
while len(positions_final) < 20 and len(positions_final) < len(
all_positions):
symbols.append(data[all_positions[i[counter]]]['symbol'])
positions_final.append(all_positions[i[counter]])
volume.append(data[all_positions[i[counter]]]['quoteVolume'])
price_change.append(data[all_positions[i[counter]]][
'priceChangePercent'])
counter += 1
return symbols, volume, positions_final, price_change
def get_kline():
symbols, volume, pozitii, price_change = calculate_data_list()
prices = []
prices1 = []
k = []
for x in symbols:
try:
order = client.get_klines(symbol=x, interval='1m')
except BinanceAPIException as e:
print(e.status_code)
print(e.message)
try:
order1 = client.get_klines(symbol=x, limit=1000, interval='15m')
except BinanceAPIException as e:
print(e.status_code)
print(e.message)
if len(order1) < 970:
a = symbols.index(x)
k.append(a)
else:
prices.append([])
prices1.append([])
for i in range(len(order)):
prices[-1].append(float(order[i][1]))
for i in range(len(order1)):
prices1[-1].append(float(order1[i][1]))
k.reverse()
for x in k:
symbols.pop(x)
volume.pop(x)
pozitii.pop(x)
price_change.pop(x)
return symbols, volume, pozitii, prices, prices1, price_change
def process_depth(msg):
sums5 = 0
sumb5 = 0
m = -1
for x in range(5):
if float(msg['data']['bids'][x][1]) > m:
m = float(msg['data']['bids'][x][1])
sums5 = sums5 + float(msg['data']['bids'][x][1])
sumb5 = sumb5 + float(msg['data']['asks'][x][1])
ratio1 = sums5 / sumb5
if ratio1 < 1:
ratio1 = 1 / ratio1 * -1 + 1
else:
ratio1 -= 1
sums20 = 0
sumb20 = 0
ratio2 = 0
try:
for x in range(17):
sums20 = sums20 + float(msg['data']['bids'][x][1])
sumb20 = sumb20 + float(msg['data']['asks'][x][1])
ratio2 = sums20 / sumb20
if ratio2 < 1:
ratio2 = 1 / ratio2 * -1 + 1
else:
ratio2 -= 1
except Exception as e:
print('')
for i in range(len(symbols)):
simbol = symbols[i].lower() + '@depth20'
if simbol == msg['stream']:
ratio5[i] = round(ratio1, 2)
ratio20[i] = round(ratio2, 2)
max_order5[i] = m
ratio5_sum[i] = round(float(sums5) * float(current_price[i]) *
100 / float(volume[i]), 2)
current_price[i] = float(msg['data']['bids'][0][0])
def process_ticker(msg):
i = 0
for x in symbols:
for y in range(len(msg)):
if x == str(msg[y]['s']):
volume[i] = int(float(msg[y]['q']))
price_change[i] = int(float(msg[y]['P']))
i += 1
<|reserved_special_token_0|>
def kline_continuum():
i = 0
while True:
time.sleep(60)
for x in range(len(symbols)):
k_line_1m[x].pop(0)
k_line_1m[x].append(current_price[x])
if i % 15 == 0:
k_line_15m[x].pop(0)
k_line_15m[x].append(current_price[x])
i += 1
def report_10_seconds():
while True:
for x in range(len(symbols)):
if len(ratio5_10sec[x]) > 10:
ratio5_10sec[x].pop(0)
if len(ratio5_sum_10sec[x]) > 10:
ratio5_sum_10sec[x].pop(0)
ratio5_10sec[x].append(ratio5[x])
ratio5_sum_10sec[x].append(ratio5_sum[x])
time.sleep(1)
def calculate_score():
for x in range(len(symbols)):
score = 0
a = float(price_chance_2_min[x])
if a > 0 and a < 0.5:
score += 1
elif a >= 0.5 and a < 1:
score += 1.25
elif a >= 1 and a < 1.5:
score += 1.5
elif a >= 1.5 and a < 2:
score += 0.5
elif a >= 3:
score += 0.25
a = float(price_chance_5_min[x])
if a > 0 and a < 0.5:
score += 1
elif a >= 0.5 and a < 1:
score += 1.25
elif a >= 1 and a < 2:
score += 1.5
elif a >= 2 and a < 3:
score += 0.5
elif a >= 3:
score += 0.25
a = float(price_chance_15_min[x])
if a <= 1 and a > -0.5:
score += 0.25
elif a <= -0.5 and a > -1:
score += 0.5
elif a <= -1 and a > -1.5:
score += 0.75
elif a <= -1.5:
score += 1
a = float(price_change_25_30_min[x])
if a <= 2 and a > -0.75:
score += 0.25
elif a <= -0.75 and a > -1.25:
score += 0.5
elif a <= -1.25 and a > -1.75:
score += 0.75
elif a <= -1.75:
score += 1
a = float(price_chance_1_hour[x])
if a <= 2 and a >= 0:
score += 0.5
elif a <= 0 and a > -2:
score += 0.75
elif a <= -2:
score += 1
a = float(price_chance_3_hour[x])
if a <= 5 and a > -1:
score += 0.25
elif a <= -1 and a > -3:
score += 0.5
elif a <= -3 and a > -6:
score += 0.75
elif a <= -6:
score += 1
a = float(price_chance_8_hour[x])
if a <= 0 and a > -4:
score += 0.25
elif a <= -4 and a > -6:
score += 0.5
elif a <= -6:
score += 0.75
if float(ratio5[x]) > 0:
score += 1
a = 0
for i in range(len(ratio5_10sec[x])):
if float(price_chance_2_min[x]) > 0.55 or float(price_chance_5_min
[x]) > 1:
if float(ratio5_10sec[x][i]) > 0:
a += 1
if float(ratio5_sum_10sec[x][i]) > 0.3:
a += 1
score += a / len(ratio5_sum_10sec[x])
if float(ratio20[x]) > 0:
score += 1
a = 0
for i in range(len(ratio5_10sec[x]) - 1):
if float(ratio5_10sec[x][i]) > 0:
a += 1
if a <= 2:
score += 0.25
elif a > 2 and a <= 4:
score += 0.5
elif a > 4 and a <= 7:
score += 0.75
elif a > 7:
score += 1
a = 0
for i in range(20, 1, -1):
if float(k_line_1m[x][-i]) > float(k_line_1m[x][-(i - 1)]):
a += 1
score += a / 10
if float(price_change_1_days[x]) > 5:
score += 0.3
if float(price_change_3_days[x]) > 10:
score += 0.25
if float(price_change_5_days[x]) > 15:
score += 0.25
if float(price_change_7_days[x]) > 20:
score += 0.25
if float(price_change_10_days[x]) > -25:
score += 0.25
a = float(average_change_10_min[x])
if a < 0.2 and a > -0.3:
score += 0.1
a = float(average_change_20_min[x])
if a < 0.2 and a > -0.3:
score += 0.1
a = float(average_change_50_min[x])
if a < 0.2 and a > -0.3:
score += 0.1
a = float(average_change_100_min[x])
if a < 0.2 and a > -0.3:
score += 0.1
total_score[x] = score
def print_results():
time.sleep(10)
while True:
for x in range(len(symbols)):
try:
price_chance_2_min[x] = round(float(current_price[x]) * 100 /
float(k_line_1m[x][-2]) - 100, 2)
price_chance_5_min[x] = round(float(current_price[x]) * 100 /
float(k_line_1m[x][-5]) - 100, 2)
price_chance_15_min[x] = round(float(current_price[x]) *
100 / float(k_line_1m[x][-15]) - 100, 2)
price_chance_30_min[x] = round(float(current_price[x]) *
100 / float(k_line_1m[x][-30]) - 100, 2)
price_chance_1_hour[x] = round(float(current_price[x]) *
100 / float(k_line_1m[x][-60]) - 100, 2)
price_chance_3_hour[x] = round(float(current_price[x]) *
100 / float(k_line_1m[x][-180]) - 100, 2)
price_chance_8_hour[x] = round(float(current_price[x]) *
100 / float(k_line_1m[x][20]) - 100, 2)
price_change_25_30_min[x] = round(float(k_line_1m[x][-6]) *
100 / float(k_line_1m[x][-30]) - 100, 2)
price_change_1_days[x] = round(float(current_price[x]) *
100 / float(k_line_15m[x][-96]) - 100, 1)
price_change_3_days[x] = round(float(current_price[x]) *
100 / float(k_line_15m[x][-288]) - 100, 1)
price_change_5_days[x] = round(float(current_price[x]) *
100 / float(k_line_15m[x][-480]) - 100, 1)
price_change_7_days[x] = round(float(current_price[x]) *
100 / float(k_line_15m[x][-672]) - 100, 1)
price_change_10_days[x] = round(float(current_price[x]) *
100 / float(k_line_15m[x][-960]) - 100, 1)
average_10_min[x] = round(float(sum(k_line_1m[x][-10:])) /
10, 8)
average_20_min[x] = round(float(sum(k_line_1m[x][-20:])) /
20, 8)
average_50_min[x] = round(float(sum(k_line_1m[x][-50:])) /
50, 8)
average_100_min[x] = round(float(sum(k_line_1m[x][-100:])) /
100, 8)
average_change_10_min[x] = round(float(current_price[x]) *
100 / float(average_10_min[x]) - 100, 2)
average_change_20_min[x] = round(float(current_price[x]) *
100 / float(average_20_min[x]) - 100, 2)
average_change_50_min[x] = round(float(current_price[x]) *
100 / float(average_50_min[x]) - 100, 2)
average_change_100_min[x] = round(float(current_price[x]) *
100 / float(average_100_min[x]) - 100, 2)
except Exception as e:
print(e)
calculate_score()
sort_by = total_score
sorted_data = sorted(range(len(sort_by)), key=lambda k: sort_by[k])
sorted_data.reverse()
print(time.ctime())
print(
'%5s %5s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %5s %6s %6s %6s %6s %6s'
% ('Symbol', 'score', 'r5', 'r20', '2m_ch', '5m_ch', '15m_ch',
'30m_ch', '1h_ch', '10MA', '20MA', '50MA', '100MA', '8h_ch',
'25-30m', 'r5sum', '1d_ch', '3d_ch', '5d_ch', '7d_ch', '10d_ch'))
for k in range(10):
i = sorted_data[k]
print(
'%5s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %5s %6s %6s %6s %6s %6s'
% (symbols[i][:-3], total_score[i], ratio5[i], ratio20[i],
price_chance_2_min[i], price_chance_5_min[i],
price_chance_15_min[i], price_chance_30_min[i],
price_chance_1_hour[i], average_change_10_min[i],
average_change_20_min[i], average_change_50_min[i],
average_change_100_min[i], price_chance_8_hour[i],
price_change_25_30_min[i], ratio5_sum[i],
price_change_1_days[i], price_change_3_days[i],
price_change_5_days[i], price_change_7_days[i],
price_change_10_days[i]))
try:
if float(total_score[sorted_data[0]]) > 10:
winsound.PlaySound('\\Sound.wav', winsound.SND_FILENAME)
except Exception as e:
print(e)
time.sleep(1)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def calculate_data_list():
counter = 0
btc = 'BTC'
symbols = []
all_positions = []
positions_final = []
volume = []
c = []
price_change = []
data = client.get_ticker()
for x in range(len(data)):
if btc in data[x]['symbol'] and data[x]['symbol'
] != 'BTCUSDT' and data[x]['symbol'] != 'VENBTC':
if float(data[x]['quoteVolume']) > 100:
all_positions.append(x)
for x in all_positions:
c.append(float(data[x]['priceChangePercent']))
i = sorted(range(len(c)), key=lambda k: c[k])
i.reverse()
while len(positions_final) < 20 and len(positions_final) < len(
all_positions):
symbols.append(data[all_positions[i[counter]]]['symbol'])
positions_final.append(all_positions[i[counter]])
volume.append(data[all_positions[i[counter]]]['quoteVolume'])
price_change.append(data[all_positions[i[counter]]][
'priceChangePercent'])
counter += 1
return symbols, volume, positions_final, price_change
def get_kline():
symbols, volume, pozitii, price_change = calculate_data_list()
prices = []
prices1 = []
k = []
for x in symbols:
try:
order = client.get_klines(symbol=x, interval='1m')
except BinanceAPIException as e:
print(e.status_code)
print(e.message)
try:
order1 = client.get_klines(symbol=x, limit=1000, interval='15m')
except BinanceAPIException as e:
print(e.status_code)
print(e.message)
if len(order1) < 970:
a = symbols.index(x)
k.append(a)
else:
prices.append([])
prices1.append([])
for i in range(len(order)):
prices[-1].append(float(order[i][1]))
for i in range(len(order1)):
prices1[-1].append(float(order1[i][1]))
k.reverse()
for x in k:
symbols.pop(x)
volume.pop(x)
pozitii.pop(x)
price_change.pop(x)
return symbols, volume, pozitii, prices, prices1, price_change
def process_depth(msg):
sums5 = 0
sumb5 = 0
m = -1
for x in range(5):
if float(msg['data']['bids'][x][1]) > m:
m = float(msg['data']['bids'][x][1])
sums5 = sums5 + float(msg['data']['bids'][x][1])
sumb5 = sumb5 + float(msg['data']['asks'][x][1])
ratio1 = sums5 / sumb5
if ratio1 < 1:
ratio1 = 1 / ratio1 * -1 + 1
else:
ratio1 -= 1
sums20 = 0
sumb20 = 0
ratio2 = 0
try:
for x in range(17):
sums20 = sums20 + float(msg['data']['bids'][x][1])
sumb20 = sumb20 + float(msg['data']['asks'][x][1])
ratio2 = sums20 / sumb20
if ratio2 < 1:
ratio2 = 1 / ratio2 * -1 + 1
else:
ratio2 -= 1
except Exception as e:
print('')
for i in range(len(symbols)):
simbol = symbols[i].lower() + '@depth20'
if simbol == msg['stream']:
ratio5[i] = round(ratio1, 2)
ratio20[i] = round(ratio2, 2)
max_order5[i] = m
ratio5_sum[i] = round(float(sums5) * float(current_price[i]) *
100 / float(volume[i]), 2)
current_price[i] = float(msg['data']['bids'][0][0])
def process_ticker(msg):
i = 0
for x in symbols:
for y in range(len(msg)):
if x == str(msg[y]['s']):
volume[i] = int(float(msg[y]['q']))
price_change[i] = int(float(msg[y]['P']))
i += 1
<|reserved_special_token_0|>
for x in symbols:
list.append(x.lower() + '@depth20')
<|reserved_special_token_0|>
bm.start()
<|reserved_special_token_0|>
def kline_continuum():
i = 0
while True:
time.sleep(60)
for x in range(len(symbols)):
k_line_1m[x].pop(0)
k_line_1m[x].append(current_price[x])
if i % 15 == 0:
k_line_15m[x].pop(0)
k_line_15m[x].append(current_price[x])
i += 1
def report_10_seconds():
while True:
for x in range(len(symbols)):
if len(ratio5_10sec[x]) > 10:
ratio5_10sec[x].pop(0)
if len(ratio5_sum_10sec[x]) > 10:
ratio5_sum_10sec[x].pop(0)
ratio5_10sec[x].append(ratio5[x])
ratio5_sum_10sec[x].append(ratio5_sum[x])
time.sleep(1)
def calculate_score():
for x in range(len(symbols)):
score = 0
a = float(price_chance_2_min[x])
if a > 0 and a < 0.5:
score += 1
elif a >= 0.5 and a < 1:
score += 1.25
elif a >= 1 and a < 1.5:
score += 1.5
elif a >= 1.5 and a < 2:
score += 0.5
elif a >= 3:
score += 0.25
a = float(price_chance_5_min[x])
if a > 0 and a < 0.5:
score += 1
elif a >= 0.5 and a < 1:
score += 1.25
elif a >= 1 and a < 2:
score += 1.5
elif a >= 2 and a < 3:
score += 0.5
elif a >= 3:
score += 0.25
a = float(price_chance_15_min[x])
if a <= 1 and a > -0.5:
score += 0.25
elif a <= -0.5 and a > -1:
score += 0.5
elif a <= -1 and a > -1.5:
score += 0.75
elif a <= -1.5:
score += 1
a = float(price_change_25_30_min[x])
if a <= 2 and a > -0.75:
score += 0.25
elif a <= -0.75 and a > -1.25:
score += 0.5
elif a <= -1.25 and a > -1.75:
score += 0.75
elif a <= -1.75:
score += 1
a = float(price_chance_1_hour[x])
if a <= 2 and a >= 0:
score += 0.5
elif a <= 0 and a > -2:
score += 0.75
elif a <= -2:
score += 1
a = float(price_chance_3_hour[x])
if a <= 5 and a > -1:
score += 0.25
elif a <= -1 and a > -3:
score += 0.5
elif a <= -3 and a > -6:
score += 0.75
elif a <= -6:
score += 1
a = float(price_chance_8_hour[x])
if a <= 0 and a > -4:
score += 0.25
elif a <= -4 and a > -6:
score += 0.5
elif a <= -6:
score += 0.75
if float(ratio5[x]) > 0:
score += 1
a = 0
for i in range(len(ratio5_10sec[x])):
if float(price_chance_2_min[x]) > 0.55 or float(price_chance_5_min
[x]) > 1:
if float(ratio5_10sec[x][i]) > 0:
a += 1
if float(ratio5_sum_10sec[x][i]) > 0.3:
a += 1
score += a / len(ratio5_sum_10sec[x])
if float(ratio20[x]) > 0:
score += 1
a = 0
for i in range(len(ratio5_10sec[x]) - 1):
if float(ratio5_10sec[x][i]) > 0:
a += 1
if a <= 2:
score += 0.25
elif a > 2 and a <= 4:
score += 0.5
elif a > 4 and a <= 7:
score += 0.75
elif a > 7:
score += 1
a = 0
for i in range(20, 1, -1):
if float(k_line_1m[x][-i]) > float(k_line_1m[x][-(i - 1)]):
a += 1
score += a / 10
if float(price_change_1_days[x]) > 5:
score += 0.3
if float(price_change_3_days[x]) > 10:
score += 0.25
if float(price_change_5_days[x]) > 15:
score += 0.25
if float(price_change_7_days[x]) > 20:
score += 0.25
if float(price_change_10_days[x]) > -25:
score += 0.25
a = float(average_change_10_min[x])
if a < 0.2 and a > -0.3:
score += 0.1
a = float(average_change_20_min[x])
if a < 0.2 and a > -0.3:
score += 0.1
a = float(average_change_50_min[x])
if a < 0.2 and a > -0.3:
score += 0.1
a = float(average_change_100_min[x])
if a < 0.2 and a > -0.3:
score += 0.1
total_score[x] = score
def print_results():
time.sleep(10)
while True:
for x in range(len(symbols)):
try:
price_chance_2_min[x] = round(float(current_price[x]) * 100 /
float(k_line_1m[x][-2]) - 100, 2)
price_chance_5_min[x] = round(float(current_price[x]) * 100 /
float(k_line_1m[x][-5]) - 100, 2)
price_chance_15_min[x] = round(float(current_price[x]) *
100 / float(k_line_1m[x][-15]) - 100, 2)
price_chance_30_min[x] = round(float(current_price[x]) *
100 / float(k_line_1m[x][-30]) - 100, 2)
price_chance_1_hour[x] = round(float(current_price[x]) *
100 / float(k_line_1m[x][-60]) - 100, 2)
price_chance_3_hour[x] = round(float(current_price[x]) *
100 / float(k_line_1m[x][-180]) - 100, 2)
price_chance_8_hour[x] = round(float(current_price[x]) *
100 / float(k_line_1m[x][20]) - 100, 2)
price_change_25_30_min[x] = round(float(k_line_1m[x][-6]) *
100 / float(k_line_1m[x][-30]) - 100, 2)
price_change_1_days[x] = round(float(current_price[x]) *
100 / float(k_line_15m[x][-96]) - 100, 1)
price_change_3_days[x] = round(float(current_price[x]) *
100 / float(k_line_15m[x][-288]) - 100, 1)
price_change_5_days[x] = round(float(current_price[x]) *
100 / float(k_line_15m[x][-480]) - 100, 1)
price_change_7_days[x] = round(float(current_price[x]) *
100 / float(k_line_15m[x][-672]) - 100, 1)
price_change_10_days[x] = round(float(current_price[x]) *
100 / float(k_line_15m[x][-960]) - 100, 1)
average_10_min[x] = round(float(sum(k_line_1m[x][-10:])) /
10, 8)
average_20_min[x] = round(float(sum(k_line_1m[x][-20:])) /
20, 8)
average_50_min[x] = round(float(sum(k_line_1m[x][-50:])) /
50, 8)
average_100_min[x] = round(float(sum(k_line_1m[x][-100:])) /
100, 8)
average_change_10_min[x] = round(float(current_price[x]) *
100 / float(average_10_min[x]) - 100, 2)
average_change_20_min[x] = round(float(current_price[x]) *
100 / float(average_20_min[x]) - 100, 2)
average_change_50_min[x] = round(float(current_price[x]) *
100 / float(average_50_min[x]) - 100, 2)
average_change_100_min[x] = round(float(current_price[x]) *
100 / float(average_100_min[x]) - 100, 2)
except Exception as e:
print(e)
calculate_score()
sort_by = total_score
sorted_data = sorted(range(len(sort_by)), key=lambda k: sort_by[k])
sorted_data.reverse()
print(time.ctime())
print(
'%5s %5s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %5s %6s %6s %6s %6s %6s'
% ('Symbol', 'score', 'r5', 'r20', '2m_ch', '5m_ch', '15m_ch',
'30m_ch', '1h_ch', '10MA', '20MA', '50MA', '100MA', '8h_ch',
'25-30m', 'r5sum', '1d_ch', '3d_ch', '5d_ch', '7d_ch', '10d_ch'))
for k in range(10):
i = sorted_data[k]
print(
'%5s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %5s %6s %6s %6s %6s %6s'
% (symbols[i][:-3], total_score[i], ratio5[i], ratio20[i],
price_chance_2_min[i], price_chance_5_min[i],
price_chance_15_min[i], price_chance_30_min[i],
price_chance_1_hour[i], average_change_10_min[i],
average_change_20_min[i], average_change_50_min[i],
average_change_100_min[i], price_chance_8_hour[i],
price_change_25_30_min[i], ratio5_sum[i],
price_change_1_days[i], price_change_3_days[i],
price_change_5_days[i], price_change_7_days[i],
price_change_10_days[i]))
try:
if float(total_score[sorted_data[0]]) > 10:
winsound.PlaySound('\\Sound.wav', winsound.SND_FILENAME)
except Exception as e:
print(e)
time.sleep(1)
<|reserved_special_token_0|>
[thread.start() for thread in threads]
[thread.join() for thread in threads]
<|reserved_special_token_1|>
from binance.client import Client
from binance.websockets import BinanceSocketManager
from binance.enums import *
import time
import threading
import winsound
client = Client(your_api_key, your_api_secret)
def calculate_data_list():
counter = 0
btc = 'BTC'
symbols = []
all_positions = []
positions_final = []
volume = []
c = []
price_change = []
data = client.get_ticker()
for x in range(len(data)):
if btc in data[x]['symbol'] and data[x]['symbol'
] != 'BTCUSDT' and data[x]['symbol'] != 'VENBTC':
if float(data[x]['quoteVolume']) > 100:
all_positions.append(x)
for x in all_positions:
c.append(float(data[x]['priceChangePercent']))
i = sorted(range(len(c)), key=lambda k: c[k])
i.reverse()
while len(positions_final) < 20 and len(positions_final) < len(
all_positions):
symbols.append(data[all_positions[i[counter]]]['symbol'])
positions_final.append(all_positions[i[counter]])
volume.append(data[all_positions[i[counter]]]['quoteVolume'])
price_change.append(data[all_positions[i[counter]]][
'priceChangePercent'])
counter += 1
return symbols, volume, positions_final, price_change
def get_kline():
symbols, volume, pozitii, price_change = calculate_data_list()
prices = []
prices1 = []
k = []
for x in symbols:
try:
order = client.get_klines(symbol=x, interval='1m')
except BinanceAPIException as e:
print(e.status_code)
print(e.message)
try:
order1 = client.get_klines(symbol=x, limit=1000, interval='15m')
except BinanceAPIException as e:
print(e.status_code)
print(e.message)
if len(order1) < 970:
a = symbols.index(x)
k.append(a)
else:
prices.append([])
prices1.append([])
for i in range(len(order)):
prices[-1].append(float(order[i][1]))
for i in range(len(order1)):
prices1[-1].append(float(order1[i][1]))
k.reverse()
for x in k:
symbols.pop(x)
volume.pop(x)
pozitii.pop(x)
price_change.pop(x)
return symbols, volume, pozitii, prices, prices1, price_change
def process_depth(msg):
sums5 = 0
sumb5 = 0
m = -1
for x in range(5):
if float(msg['data']['bids'][x][1]) > m:
m = float(msg['data']['bids'][x][1])
sums5 = sums5 + float(msg['data']['bids'][x][1])
sumb5 = sumb5 + float(msg['data']['asks'][x][1])
ratio1 = sums5 / sumb5
if ratio1 < 1:
ratio1 = 1 / ratio1 * -1 + 1
else:
ratio1 -= 1
sums20 = 0
sumb20 = 0
ratio2 = 0
try:
for x in range(17):
sums20 = sums20 + float(msg['data']['bids'][x][1])
sumb20 = sumb20 + float(msg['data']['asks'][x][1])
ratio2 = sums20 / sumb20
if ratio2 < 1:
ratio2 = 1 / ratio2 * -1 + 1
else:
ratio2 -= 1
except Exception as e:
print('')
for i in range(len(symbols)):
simbol = symbols[i].lower() + '@depth20'
if simbol == msg['stream']:
ratio5[i] = round(ratio1, 2)
ratio20[i] = round(ratio2, 2)
max_order5[i] = m
ratio5_sum[i] = round(float(sums5) * float(current_price[i]) *
100 / float(volume[i]), 2)
current_price[i] = float(msg['data']['bids'][0][0])
def process_ticker(msg):
i = 0
for x in symbols:
for y in range(len(msg)):
if x == str(msg[y]['s']):
volume[i] = int(float(msg[y]['q']))
price_change[i] = int(float(msg[y]['P']))
i += 1
symbols, volume, pozitii, k_line_1m, k_line_15m, price_change = get_kline()
max_order5 = [(0) for x in range(len(symbols))]
current_price = [(0) for x in range(len(symbols))]
price_chance_2_min = [(0) for x in range(len(symbols))]
price_chance_5_min = [(0) for x in range(len(symbols))]
price_chance_15_min = [(0) for x in range(len(symbols))]
price_chance_30_min = [(0) for x in range(len(symbols))]
price_change_25_30_min = [(0) for x in range(len(symbols))]
price_chance_1_hour = [(0) for x in range(len(symbols))]
price_chance_3_hour = [(0) for x in range(len(symbols))]
price_chance_8_hour = [(0) for x in range(len(symbols))]
price_change_1_days = [(0) for x in range(len(symbols))]
price_change_3_days = [(0) for x in range(len(symbols))]
price_change_5_days = [(0) for x in range(len(symbols))]
price_change_7_days = [(0) for x in range(len(symbols))]
price_change_10_days = [(0) for x in range(len(symbols))]
average_10_min = [(0) for x in range(len(symbols))]
average_20_min = [(0) for x in range(len(symbols))]
average_50_min = [(0) for x in range(len(symbols))]
average_100_min = [(0) for x in range(len(symbols))]
average_change_10_min = [(0) for x in range(len(symbols))]
average_change_20_min = [(0) for x in range(len(symbols))]
average_change_50_min = [(0) for x in range(len(symbols))]
average_change_100_min = [(0) for x in range(len(symbols))]
total_score = [(0) for x in range(len(symbols))]
ratio5 = [(0) for x in range(len(symbols))]
ratio5_10sec = [[] for y in range(len(symbols))]
ratio5_sum = [(0) for x in range(len(symbols))]
ratio5_sum_10sec = [[] for y in range(len(symbols))]
ratio20 = [(0) for x in range(len(symbols))]
list = []
for x in symbols:
list.append(x.lower() + '@depth20')
bm = BinanceSocketManager(client)
bm.start()
depth_socket = bm.start_multiplex_socket(list, process_depth)
ticker_socket = bm.start_ticker_socket(process_ticker)
def kline_continuum():
i = 0
while True:
time.sleep(60)
for x in range(len(symbols)):
k_line_1m[x].pop(0)
k_line_1m[x].append(current_price[x])
if i % 15 == 0:
k_line_15m[x].pop(0)
k_line_15m[x].append(current_price[x])
i += 1
def report_10_seconds():
while True:
for x in range(len(symbols)):
if len(ratio5_10sec[x]) > 10:
ratio5_10sec[x].pop(0)
if len(ratio5_sum_10sec[x]) > 10:
ratio5_sum_10sec[x].pop(0)
ratio5_10sec[x].append(ratio5[x])
ratio5_sum_10sec[x].append(ratio5_sum[x])
time.sleep(1)
def calculate_score():
for x in range(len(symbols)):
score = 0
a = float(price_chance_2_min[x])
if a > 0 and a < 0.5:
score += 1
elif a >= 0.5 and a < 1:
score += 1.25
elif a >= 1 and a < 1.5:
score += 1.5
elif a >= 1.5 and a < 2:
score += 0.5
elif a >= 3:
score += 0.25
a = float(price_chance_5_min[x])
if a > 0 and a < 0.5:
score += 1
elif a >= 0.5 and a < 1:
score += 1.25
elif a >= 1 and a < 2:
score += 1.5
elif a >= 2 and a < 3:
score += 0.5
elif a >= 3:
score += 0.25
a = float(price_chance_15_min[x])
if a <= 1 and a > -0.5:
score += 0.25
elif a <= -0.5 and a > -1:
score += 0.5
elif a <= -1 and a > -1.5:
score += 0.75
elif a <= -1.5:
score += 1
a = float(price_change_25_30_min[x])
if a <= 2 and a > -0.75:
score += 0.25
elif a <= -0.75 and a > -1.25:
score += 0.5
elif a <= -1.25 and a > -1.75:
score += 0.75
elif a <= -1.75:
score += 1
a = float(price_chance_1_hour[x])
if a <= 2 and a >= 0:
score += 0.5
elif a <= 0 and a > -2:
score += 0.75
elif a <= -2:
score += 1
a = float(price_chance_3_hour[x])
if a <= 5 and a > -1:
score += 0.25
elif a <= -1 and a > -3:
score += 0.5
elif a <= -3 and a > -6:
score += 0.75
elif a <= -6:
score += 1
a = float(price_chance_8_hour[x])
if a <= 0 and a > -4:
score += 0.25
elif a <= -4 and a > -6:
score += 0.5
elif a <= -6:
score += 0.75
if float(ratio5[x]) > 0:
score += 1
a = 0
for i in range(len(ratio5_10sec[x])):
if float(price_chance_2_min[x]) > 0.55 or float(price_chance_5_min
[x]) > 1:
if float(ratio5_10sec[x][i]) > 0:
a += 1
if float(ratio5_sum_10sec[x][i]) > 0.3:
a += 1
score += a / len(ratio5_sum_10sec[x])
if float(ratio20[x]) > 0:
score += 1
a = 0
for i in range(len(ratio5_10sec[x]) - 1):
if float(ratio5_10sec[x][i]) > 0:
a += 1
if a <= 2:
score += 0.25
elif a > 2 and a <= 4:
score += 0.5
elif a > 4 and a <= 7:
score += 0.75
elif a > 7:
score += 1
a = 0
for i in range(20, 1, -1):
if float(k_line_1m[x][-i]) > float(k_line_1m[x][-(i - 1)]):
a += 1
score += a / 10
if float(price_change_1_days[x]) > 5:
score += 0.3
if float(price_change_3_days[x]) > 10:
score += 0.25
if float(price_change_5_days[x]) > 15:
score += 0.25
if float(price_change_7_days[x]) > 20:
score += 0.25
if float(price_change_10_days[x]) > -25:
score += 0.25
a = float(average_change_10_min[x])
if a < 0.2 and a > -0.3:
score += 0.1
a = float(average_change_20_min[x])
if a < 0.2 and a > -0.3:
score += 0.1
a = float(average_change_50_min[x])
if a < 0.2 and a > -0.3:
score += 0.1
a = float(average_change_100_min[x])
if a < 0.2 and a > -0.3:
score += 0.1
total_score[x] = score
def print_results():
time.sleep(10)
while True:
for x in range(len(symbols)):
try:
price_chance_2_min[x] = round(float(current_price[x]) * 100 /
float(k_line_1m[x][-2]) - 100, 2)
price_chance_5_min[x] = round(float(current_price[x]) * 100 /
float(k_line_1m[x][-5]) - 100, 2)
price_chance_15_min[x] = round(float(current_price[x]) *
100 / float(k_line_1m[x][-15]) - 100, 2)
price_chance_30_min[x] = round(float(current_price[x]) *
100 / float(k_line_1m[x][-30]) - 100, 2)
price_chance_1_hour[x] = round(float(current_price[x]) *
100 / float(k_line_1m[x][-60]) - 100, 2)
price_chance_3_hour[x] = round(float(current_price[x]) *
100 / float(k_line_1m[x][-180]) - 100, 2)
price_chance_8_hour[x] = round(float(current_price[x]) *
100 / float(k_line_1m[x][20]) - 100, 2)
price_change_25_30_min[x] = round(float(k_line_1m[x][-6]) *
100 / float(k_line_1m[x][-30]) - 100, 2)
price_change_1_days[x] = round(float(current_price[x]) *
100 / float(k_line_15m[x][-96]) - 100, 1)
price_change_3_days[x] = round(float(current_price[x]) *
100 / float(k_line_15m[x][-288]) - 100, 1)
price_change_5_days[x] = round(float(current_price[x]) *
100 / float(k_line_15m[x][-480]) - 100, 1)
price_change_7_days[x] = round(float(current_price[x]) *
100 / float(k_line_15m[x][-672]) - 100, 1)
price_change_10_days[x] = round(float(current_price[x]) *
100 / float(k_line_15m[x][-960]) - 100, 1)
average_10_min[x] = round(float(sum(k_line_1m[x][-10:])) /
10, 8)
average_20_min[x] = round(float(sum(k_line_1m[x][-20:])) /
20, 8)
average_50_min[x] = round(float(sum(k_line_1m[x][-50:])) /
50, 8)
average_100_min[x] = round(float(sum(k_line_1m[x][-100:])) /
100, 8)
average_change_10_min[x] = round(float(current_price[x]) *
100 / float(average_10_min[x]) - 100, 2)
average_change_20_min[x] = round(float(current_price[x]) *
100 / float(average_20_min[x]) - 100, 2)
average_change_50_min[x] = round(float(current_price[x]) *
100 / float(average_50_min[x]) - 100, 2)
average_change_100_min[x] = round(float(current_price[x]) *
100 / float(average_100_min[x]) - 100, 2)
except Exception as e:
print(e)
calculate_score()
sort_by = total_score
sorted_data = sorted(range(len(sort_by)), key=lambda k: sort_by[k])
sorted_data.reverse()
print(time.ctime())
print(
'%5s %5s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %5s %6s %6s %6s %6s %6s'
% ('Symbol', 'score', 'r5', 'r20', '2m_ch', '5m_ch', '15m_ch',
'30m_ch', '1h_ch', '10MA', '20MA', '50MA', '100MA', '8h_ch',
'25-30m', 'r5sum', '1d_ch', '3d_ch', '5d_ch', '7d_ch', '10d_ch'))
for k in range(10):
i = sorted_data[k]
print(
'%5s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %5s %6s %6s %6s %6s %6s'
% (symbols[i][:-3], total_score[i], ratio5[i], ratio20[i],
price_chance_2_min[i], price_chance_5_min[i],
price_chance_15_min[i], price_chance_30_min[i],
price_chance_1_hour[i], average_change_10_min[i],
average_change_20_min[i], average_change_50_min[i],
average_change_100_min[i], price_chance_8_hour[i],
price_change_25_30_min[i], ratio5_sum[i],
price_change_1_days[i], price_change_3_days[i],
price_change_5_days[i], price_change_7_days[i],
price_change_10_days[i]))
try:
if float(total_score[sorted_data[0]]) > 10:
winsound.PlaySound('\\Sound.wav', winsound.SND_FILENAME)
except Exception as e:
print(e)
time.sleep(1)
threads = [threading.Thread(target=kline_continuum), threading.Thread(
target=report_10_seconds), threading.Thread(target=print_results)]
[thread.start() for thread in threads]
[thread.join() for thread in threads]
<|reserved_special_token_1|>
from binance.client import Client
from binance.websockets import BinanceSocketManager
from binance.enums import *
from binance.exceptions import BinanceAPIException
import time
import threading
import winsound

# Replace your_api_key, your_api_secret with your api_key, api_secret
client = Client(your_api_key, your_api_secret)
# Calculate list of symbols
def calculate_data_list():
    counter = 0
    btc = 'BTC'
    symbols = []
    all_positions = []
    positions_final = []
    volume = []
    c = []
    price_change = []
    data = client.get_ticker()
    for x in range(len(data)):
        if (btc in data[x]['symbol']) and data[x]['symbol'] != 'BTCUSDT' and data[x]['symbol'] != 'VENBTC':
            if float(data[x]['quoteVolume']) > 100:
                all_positions.append(x)
    for x in all_positions:
        c.append(float(data[x]['priceChangePercent']))
    i = sorted(range(len(c)), key=lambda k: c[k])
    i.reverse()
    while (len(positions_final) < 20 and len(positions_final) < len(all_positions)):
        symbols.append(data[all_positions[i[counter]]]['symbol'])
        positions_final.append(all_positions[i[counter]])
        volume.append(data[all_positions[i[counter]]]['quoteVolume'])
        price_change.append(data[all_positions[i[counter]]]['priceChangePercent'])
        counter += 1
    return symbols, volume, positions_final, price_change
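
# For reference, each element returned by client.get_ticker() is a dict from
# Binance's 24h ticker endpoint; the fields used above look like this
# (illustrative values only):
#
#   {'symbol': 'ETHBTC', 'priceChangePercent': '1.237', 'quoteVolume': '4820.5', ...}
#
# so the filter keeps *BTC pairs with more than 100 BTC of 24h quote volume
# and the while loop selects the 20 biggest 24h gainers among them.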
# Get candlestick data from Binance
def get_kline():
    symbols, volume, pozitii, price_change = calculate_data_list()
    prices = []
    prices1 = []
    k = []
    for x in symbols:
        try:
            order = client.get_klines(  # Get 1 minute candlestick data from the server
                symbol=x,
                interval='1m')
        except BinanceAPIException as e:
            print(e.status_code)
            print(e.message)
        try:
            order1 = client.get_klines(  # Get 15 minute candlestick data from the server
                symbol=x,
                limit=1000,
                interval='15m')
        except BinanceAPIException as e:
            print(e.status_code)
            print(e.message)
        if len(order1) < 970:  # check that the coin has at least 10 days of data
            a = symbols.index(x)  # get the index of x in symbols
            k.append(a)
        else:
            prices.append([])  # add an empty list to the list of 1 minute prices
            prices1.append([])  # add an empty list to the list of 15 minute prices
            for i in range(len(order)):
                prices[-1].append(float(order[i][1]))  # save the 1 minute data
            for i in range(len(order1)):
                prices1[-1].append(float(order1[i][1]))  # save the 15 minute data
    k.reverse()
    for x in k:
        symbols.pop(x)
        volume.pop(x)
        pozitii.pop(x)
        price_change.pop(x)
    return symbols, volume, pozitii, prices, prices1, price_change
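
# Each kline returned by client.get_klines() is a list laid out as
# [open_time, open, high, low, close, volume, close_time, ...], so
# order[i][1] above reads the open price of every candle. The 1m call uses
# the default limit of 500 candles, which is why indexes such as
# k_line_1m[x][20] (about 480 minutes back) work later on.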
# Calculate the ratio between bid and ask volumes from a depth message
def process_depth(msg):
    sums5 = 0
    sumb5 = 0
    m = -1
    for x in range(5):
        if float(msg['data']['bids'][x][1]) > m:
            m = float(msg['data']['bids'][x][1])
        sums5 = sums5 + float(msg['data']['bids'][x][1])
        sumb5 = sumb5 + float(msg['data']['asks'][x][1])
    ratio1 = sums5 / sumb5
    if ratio1 < 1:
        ratio1 = ((1 / ratio1) * -1) + 1
    else:
        ratio1 -= 1
    sums20 = 0
    sumb20 = 0
    ratio2 = 0
    try:
        for x in range(17):  # top 17 levels of the 20 delivered by the stream
            sums20 = sums20 + float(msg['data']['bids'][x][1])
            sumb20 = sumb20 + float(msg['data']['asks'][x][1])
        ratio2 = sums20 / sumb20
        if ratio2 < 1:
            ratio2 = ((1 / ratio2) * -1) + 1
        else:
            ratio2 -= 1
    except Exception:
        pass  # book shallower than 17 levels; leave ratio2 at 0
    for i in range(len(symbols)):
        simbol = symbols[i].lower() + '@depth20'
        if simbol == msg['stream']:
            ratio5[i] = round(ratio1, 2)
            ratio20[i] = round(ratio2, 2)
            max_order5[i] = m
            ratio5_sum[i] = round(float(sums5) * float(current_price[i]) * 100 / float(volume[i]), 2)
            current_price[i] = float(msg['data']['bids'][0][0])
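
# A quick worked example of the signed ratio above (made-up numbers): with
# 30 units bid and 20 units asked across the top 5 levels, ratio1 = 30 / 20
# = 1.5, which maps to +0.5; with the sides swapped, ratio1 = 20 / 30, which
# maps to (1 / (20 / 30)) * -1 + 1 = -0.5. Equal pressure on both sides
# therefore lands symmetrically around 0.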
# Refresh the cached volume and 24h price change from the latest ticker message
def process_ticker(msg):
    i = 0
    for x in symbols:
        for y in range(len(msg)):
            if x == str(msg[y]['s']):
                volume[i] = int(float(msg[y]['q']))
                price_change[i] = int(float(msg[y]['P']))
        i += 1
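
# The ticker stream delivers a list of dicts using Binance's short field
# names: 's' is the symbol, 'q' the 24h quote asset volume and 'P' the 24h
# price change percent, which is what the lookups above rely on.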
symbols, volume, pozitii, k_line_1m, k_line_15m, price_change = get_kline()
# Declaring lists necessary for storing data
max_order5 = [0 for x in range(len(symbols))]
current_price = [0 for x in range(len(symbols))]
price_chance_2_min = [0 for x in range(len(symbols))]
price_chance_5_min = [0 for x in range(len(symbols))]
price_chance_15_min = [0 for x in range(len(symbols))]
price_chance_30_min = [0 for x in range(len(symbols))]
price_change_25_30_min = [0 for x in range(len(symbols))]
price_chance_1_hour = [0 for x in range(len(symbols))]
price_chance_3_hour = [0 for x in range(len(symbols))]
price_chance_8_hour = [0 for x in range(len(symbols))]
price_change_1_days = [0 for x in range(len(symbols))]
price_change_3_days = [0 for x in range(len(symbols))]
price_change_5_days = [0 for x in range(len(symbols))]
price_change_7_days = [0 for x in range(len(symbols))]
price_change_10_days = [0 for x in range(len(symbols))]
average_10_min = [0 for x in range(len(symbols))]
average_20_min = [0 for x in range(len(symbols))]
average_50_min = [0 for x in range(len(symbols))]
average_100_min = [0 for x in range(len(symbols))]
average_change_10_min = [0 for x in range(len(symbols))]
average_change_20_min = [0 for x in range(len(symbols))]
average_change_50_min = [0 for x in range(len(symbols))]
average_change_100_min = [0 for x in range(len(symbols))]
total_score = [0 for x in range(len(symbols))]
ratio5 = [0 for x in range(len(symbols))]
ratio5_10sec = [[] for y in range(len(symbols))]
ratio5_sum = [0 for x in range(len(symbols))]
ratio5_sum_10sec = [[] for y in range(len(symbols))]
ratio20 = [0 for x in range(len(symbols))]
# Create the list of stream names needed for the depth socket
list = []
for x in symbols:
    list.append(x.lower() + '@depth20')  # append @depth20 to each symbol and add it to the list
bm = BinanceSocketManager(client)
bm.start()
depth_socket = bm.start_multiplex_socket(list,process_depth) # start depth socket
ticker_socket = bm.start_ticker_socket(process_ticker) # start price socket
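
# '<symbol>@depth20' is Binance's partial book depth stream, so process_depth
# receives the top 20 bid/ask levels of every tracked pair over one
# multiplexed connection. Both start_* calls return connection keys; a
# controlled shutdown could pass them to bm.stop_socket(...) (sketch, not
# exercised by this script).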
# maintain the candlestick lists
def kline_continuum():
    i = 0
    while True:
        time.sleep(60)
        for x in range(len(symbols)):
            k_line_1m[x].pop(0)
            k_line_1m[x].append(current_price[x])  # add the price to the 1 minute candlestick list every minute
            if i % 15 == 0:
                k_line_15m[x].pop(0)
                k_line_15m[x].append(current_price[x])  # add the price to the 15 minute candlestick list every 15 minutes
        i += 1
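
# Note that the 15m list is refreshed when i % 15 == 0 in a loop that sleeps
# 60 seconds per pass, so its candles land on roughly, not exactly,
# 15-minute boundaries and drift slowly over long runs.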
# Keep the bid/ask ratio history for the last 10 seconds
def report_10_seconds():
    while True:
        for x in range(len(symbols)):
            if len(ratio5_10sec[x]) > 10:
                ratio5_10sec[x].pop(0)
            if len(ratio5_sum_10sec[x]) > 10:
                ratio5_sum_10sec[x].pop(0)
            ratio5_10sec[x].append(ratio5[x])
            ratio5_sum_10sec[x].append(ratio5_sum[x])
        time.sleep(1)
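
# Sampling once per second and trimming at length 10 leaves each history
# list holding the last ~11 snapshots, which is the window that
# calculate_score() below averages over.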
# Calculate score for each symbol, you can add as many parameters as you want
def calculate_score():
    for x in range(len(symbols)):
        score = 0
        # 2 minute change parameter score calculation
        a = float(price_chance_2_min[x])
        if a > 0 and a < 0.5:
            score += 1
        elif a >= 0.5 and a < 1:
            score += 1.25
        elif a >= 1 and a < 1.5:
            score += 1.5
        elif a >= 1.5 and a < 2:
            score += 0.5
        elif a >= 3:
            score += 0.25
        # 5 minute change parameter score calculation
        a = float(price_chance_5_min[x])
        if a > 0 and a < 0.5:
            score += 1
        elif a >= 0.5 and a < 1:
            score += 1.25
        elif a >= 1 and a < 2:
            score += 1.5
        elif a >= 2 and a < 3:
            score += 0.5
        elif a >= 3:
            score += 0.25
        # 15 minute change parameter score calculation
        a = float(price_chance_15_min[x])
        if a <= 1 and a > -0.5:
            score += 0.25
        elif a <= -0.5 and a > -1:
            score += 0.5
        elif a <= -1 and a > -1.5:
            score += 0.75
        elif a <= -1.5:
            score += 1
        # change between 25 and 30 minutes ago parameter score calculation
        a = float(price_change_25_30_min[x])
        if a <= 2 and a > -0.75:
            score += 0.25
        elif a <= -0.75 and a > -1.25:
            score += 0.5
        elif a <= -1.25 and a > -1.75:
            score += 0.75
        elif a <= -1.75:
            score += 1
        # 1 hour change parameter score calculation
        a = float(price_chance_1_hour[x])
        if a <= 2 and a >= 0:
            score += 0.5
        elif a <= 0 and a > -2:
            score += 0.75
        elif a <= -2:
            score += 1
        # 3 hour change parameter score calculation
        a = float(price_chance_3_hour[x])
        if a <= 5 and a > -1:
            score += 0.25
        elif a <= -1 and a > -3:
            score += 0.5
        elif a <= -3 and a > -6:
            score += 0.75
        elif a <= -6:
            score += 1
        # 8 hour change parameter score calculation
        a = float(price_chance_8_hour[x])
        if a <= 0 and a > -4:
            score += 0.25
        elif a <= -4 and a > -6:
            score += 0.5
        elif a <= -6:
            score += 0.75
        if float(ratio5[x]) > 0:
            score += 1
        a = 0
        for i in range(len(ratio5_10sec[x])):
            if float(price_chance_2_min[x]) > 0.55 or float(price_chance_5_min[x]) > 1:
                if float(ratio5_10sec[x][i]) > 0:
                    a += 1
                if float(ratio5_sum_10sec[x][i]) > 0.3:
                    a += 1
        score += a / len(ratio5_sum_10sec[x])
        if float(ratio20[x]) > 0:
            score += 1
        a = 0
        for i in range(len(ratio5_10sec[x]) - 1):
            if float(ratio5_10sec[x][i]) > 0:
                a += 1
        if a <= 2:
            score += 0.25
        elif a > 2 and a <= 4:
            score += 0.5
        elif a > 4 and a <= 7:
            score += 0.75
        elif a > 7:
            score += 1
        a = 0
        for i in range(20, 1, -1):
            if float(k_line_1m[x][-i]) > float(k_line_1m[x][-(i - 1)]):
                a += 1
        score += a / 10
        # 1 day change parameter score calculation
        if float(price_change_1_days[x]) > 5:
            score += 0.3
        # 3 day change parameter score calculation
        if float(price_change_3_days[x]) > 10:
            score += 0.25
        # 5 day change parameter score calculation
        if float(price_change_5_days[x]) > 15:
            score += 0.25
        # 7 day change parameter score calculation
        if float(price_change_7_days[x]) > 20:
            score += 0.25
        # 10 day change parameter score calculation
        if float(price_change_10_days[x]) > -25:
            score += 0.25
        # 10 minutes moving average parameter score calculation
        a = float(average_change_10_min[x])
        if a < 0.2 and a > -0.3:
            score += 0.1
        # 20 minutes moving average parameter score calculation
        a = float(average_change_20_min[x])
        if a < 0.2 and a > -0.3:
            score += 0.1
        # 50 minutes moving average parameter score calculation
        a = float(average_change_50_min[x])
        if a < 0.2 and a > -0.3:
            score += 0.1
        # 100 minutes moving average parameter score calculation
        a = float(average_change_100_min[x])
        if a < 0.2 and a > -0.3:
            score += 0.1
        # save score
        total_score[x] = score
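
# A worked example of the scoring (made-up inputs): a coin up 0.7% over 2
# minutes (+1.25), up 1.2% over 5 minutes (+1.5), down 1.2% over 15 minutes
# (+0.75) with a positive top-5 book ratio (+1) already carries 4.5 points
# before the remaining parameters are added; the sound alert in
# print_results() only fires once the full sum exceeds 10.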
def print_results():
    # sleep time before starting calculations
    time.sleep(10)
    while True:
        for x in range(len(symbols)):
            # calculate parameter percentages
            try:
                price_chance_2_min[x] = round(float(current_price[x]) * 100 / float(k_line_1m[x][-2]) - 100, 2)
                price_chance_5_min[x] = round(float(current_price[x]) * 100 / float(k_line_1m[x][-5]) - 100, 2)
                price_chance_15_min[x] = round(float(current_price[x]) * 100 / float(k_line_1m[x][-15]) - 100, 2)
                price_chance_30_min[x] = round(float(current_price[x]) * 100 / float(k_line_1m[x][-30]) - 100, 2)
                price_chance_1_hour[x] = round(float(current_price[x]) * 100 / float(k_line_1m[x][-60]) - 100, 2)
                price_chance_3_hour[x] = round(float(current_price[x]) * 100 / float(k_line_1m[x][-180]) - 100, 2)
                # index 20 of the 500-candle 1m history is ~480 minutes (8 hours) ago
                price_chance_8_hour[x] = round(float(current_price[x]) * 100 / float(k_line_1m[x][20]) - 100, 2)
                price_change_25_30_min[x] = round(float(k_line_1m[x][-6]) * 100 / float(k_line_1m[x][-30]) - 100, 2)
                price_change_1_days[x] = round(float(current_price[x]) * 100 / float(k_line_15m[x][-96]) - 100, 1)
                price_change_3_days[x] = round(float(current_price[x]) * 100 / float(k_line_15m[x][-288]) - 100, 1)
                price_change_5_days[x] = round(float(current_price[x]) * 100 / float(k_line_15m[x][-480]) - 100, 1)
                price_change_7_days[x] = round(float(current_price[x]) * 100 / float(k_line_15m[x][-672]) - 100, 1)
                price_change_10_days[x] = round(float(current_price[x]) * 100 / float(k_line_15m[x][-960]) - 100, 1)
                average_10_min[x] = round(float(sum(k_line_1m[x][-10:])) / 10, 8)
                average_20_min[x] = round(float(sum(k_line_1m[x][-20:])) / 20, 8)
                average_50_min[x] = round(float(sum(k_line_1m[x][-50:])) / 50, 8)
                average_100_min[x] = round(float(sum(k_line_1m[x][-100:])) / 100, 8)
                average_change_10_min[x] = round(float(current_price[x]) * 100 / float(average_10_min[x]) - 100, 2)
                average_change_20_min[x] = round(float(current_price[x]) * 100 / float(average_20_min[x]) - 100, 2)
                average_change_50_min[x] = round(float(current_price[x]) * 100 / float(average_50_min[x]) - 100, 2)
                average_change_100_min[x] = round(float(current_price[x]) * 100 / float(average_100_min[x]) - 100, 2)
            except Exception as e:
                print(e)
        # call the score calculation
        calculate_score()
        # select the parameter by which data is sorted
        sort_by = total_score
        # sort data
        sorted_data = sorted(range(len(sort_by)), key=lambda k: sort_by[k])
        # sort data in reverse order
        sorted_data.reverse()
        # print table header
        print(time.ctime())
        print('%5s %5s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %5s %6s %6s %6s %6s %6s' % (
            'Symbol', 'score', 'r5', 'r20', '2m_ch', '5m_ch', '15m_ch', '30m_ch', '1h_ch', '10MA', '20MA', '50MA',
            '100MA', '8h_ch', '25-30m', 'r5sum', '1d_ch', '3d_ch', '5d_ch', '7d_ch', '10d_ch'))
        # print data for the top 10 cryptocurrencies
        for k in range(10):
            i = sorted_data[k]
            print('%5s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %5s %6s %6s %6s %6s %6s' % (
                symbols[i][:-3], total_score[i], ratio5[i], ratio20[i], price_chance_2_min[i], price_chance_5_min[i],
                price_chance_15_min[i], price_chance_30_min[i], price_chance_1_hour[i], average_change_10_min[i],
                average_change_20_min[i], average_change_50_min[i], average_change_100_min[i], price_chance_8_hour[i],
                price_change_25_30_min[i], ratio5_sum[i], price_change_1_days[i], price_change_3_days[i],
                price_change_5_days[i], price_change_7_days[i], price_change_10_days[i]))
        # if the score of the top coin is > 10, play a sound
        try:
            if float(total_score[sorted_data[0]]) > 10:
                winsound.PlaySound('\\Sound.wav', winsound.SND_FILENAME)
        except Exception as e:
            print(e)
        # seconds to wait before repeating the while loop
        time.sleep(1)
# Declaring threads
threads = [threading.Thread(target=kline_continuum),
           threading.Thread(target=report_10_seconds),
           threading.Thread(target=print_results)]
# Starting threads
[thread.start() for thread in threads]
[thread.join() for thread in threads]
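
# All three workers loop forever, so the join() calls above block
# indefinitely while the socket manager keeps running in the background. A
# possible (untested) extension would wrap the joins in a KeyboardInterrupt
# handler and call bm.close() to tear the websockets down cleanly.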
|
flexible
|
{
"blob_id": "dcc85b143f2394b7839f2fb9c2079a7dd9fa8e88",
"index": 4733,
"step-1": "<mask token>\n\n\ndef calculate_data_list():\n counter = 0\n btc = 'BTC'\n symbols = []\n all_positions = []\n positions_final = []\n volume = []\n c = []\n price_change = []\n data = client.get_ticker()\n for x in range(len(data)):\n if btc in data[x]['symbol'] and data[x]['symbol'\n ] != 'BTCUSDT' and data[x]['symbol'] != 'VENBTC':\n if float(data[x]['quoteVolume']) > 100:\n all_positions.append(x)\n for x in all_positions:\n c.append(float(data[x]['priceChangePercent']))\n i = sorted(range(len(c)), key=lambda k: c[k])\n i.reverse()\n while len(positions_final) < 20 and len(positions_final) < len(\n all_positions):\n symbols.append(data[all_positions[i[counter]]]['symbol'])\n positions_final.append(all_positions[i[counter]])\n volume.append(data[all_positions[i[counter]]]['quoteVolume'])\n price_change.append(data[all_positions[i[counter]]][\n 'priceChangePercent'])\n counter += 1\n return symbols, volume, positions_final, price_change\n\n\ndef get_kline():\n symbols, volume, pozitii, price_change = calculate_data_list()\n prices = []\n prices1 = []\n k = []\n for x in symbols:\n try:\n order = client.get_klines(symbol=x, interval='1m')\n except BinanceAPIException as e:\n print(e.status_code)\n print(e.message)\n try:\n order1 = client.get_klines(symbol=x, limit=1000, interval='15m')\n except BinanceAPIException as e:\n print(e.status_code)\n print(e.message)\n if len(order1) < 970:\n a = symbols.index(x)\n k.append(a)\n else:\n prices.append([])\n prices1.append([])\n for i in range(len(order)):\n prices[-1].append(float(order[i][1]))\n for i in range(len(order1)):\n prices1[-1].append(float(order1[i][1]))\n k.reverse()\n for x in k:\n symbols.pop(x)\n volume.pop(x)\n all_positions.pop(x)\n price_change.pop(x)\n return symbols, volume, pozitii, prices, prices1, price_change\n\n\ndef process_depth(msg):\n sums5 = 0\n sumb5 = 0\n m = -1\n for x in range(5):\n if float(msg['data']['bids'][x][1]) > m:\n m = float(msg['data']['bids'][x][1])\n sums5 = sums5 + float(msg['data']['bids'][x][1])\n sumb5 = sumb5 + float(msg['data']['asks'][x][1])\n ratio1 = sums5 / sumb5\n if ratio1 < 1:\n ratio1 = 1 / ratio1 * -1 + 1\n else:\n ratio1 -= 1\n sums20 = 0\n sumb20 = 0\n ratio2 = 0\n try:\n for x in range(17):\n sums20 = sums20 + float(msg['data']['bids'][x][1])\n sumb20 = sumb20 + float(msg['data']['asks'][x][1])\n ratio2 = sums20 / sumb20\n if ratio2 < 1:\n ratio2 = 1 / ratio2 * -1 + 1\n else:\n ratio2 -= 1\n except Exception as e:\n print('')\n for i in range(len(symbols)):\n simbol = symbols[i].lower() + '@depth20'\n if simbol == msg['stream']:\n ratio5[i] = round(ratio1, 2)\n ratio20[i] = round(ratio2, 2)\n max_order5[i] = m\n ratio5_sum[i] = round(float(sums5) * float(current_price[i]) * \n 100 / float(volume[i]), 2)\n current_price[i] = float(msg['data']['bids'][0][0])\n\n\n<mask token>\n\n\ndef kline_continuum():\n i = 0\n while True:\n time.sleep(60)\n for x in range(len(symbols)):\n k_line_1m[x].pop(0)\n k_line_1m[x].append(current_price[x])\n if i % 15 == 0:\n k_line_15m[x].pop(0)\n k_line_15m[x].append(current_price[x])\n i += 1\n\n\n<mask token>\n\n\ndef calculate_score():\n for x in range(len(symbols)):\n score = 0\n a = float(price_chance_2_min[x])\n if a > 0 and a < 0.5:\n score += 1\n elif a >= 0.5 and a < 1:\n score += 1.25\n elif a >= 1 and a < 1.5:\n score += 1.5\n elif a >= 1.5 and a < 2:\n score += 0.5\n elif a >= 3:\n score += 0.25\n a = float(price_chance_5_min[x])\n if a > 0 and a < 0.5:\n score += 1\n elif a >= 0.5 and a < 1:\n score += 1.25\n elif a >= 1 and a < 
2:\n score += 1.5\n elif a >= 2 and a < 3:\n score += 0.5\n elif a >= 3:\n score += 0.25\n a = float(price_chance_15_min[x])\n if a <= 1 and a > -0.5:\n score += 0.25\n elif a <= -0.5 and a > -1:\n score += 0.5\n elif a <= -1 and a > -1.5:\n score += 0.75\n elif a <= -1.5:\n score += 1\n a = float(price_change_25_30_min[x])\n if a <= 2 and a > -0.75:\n score += 0.25\n elif a <= -0.75 and a > -1.25:\n score += 0.5\n elif a <= -1.25 and a > -1.75:\n score += 0.75\n elif a <= -1.75:\n score += 1\n a = float(price_chance_1_hour[x])\n if a <= 2 and a >= 0:\n score += 0.5\n elif a <= 0 and a > -2:\n score += 0.75\n elif a <= -2:\n score += 1\n a = float(price_chance_3_hour[x])\n if a <= 5 and a > -1:\n score += 0.25\n elif a <= -1 and a > -3:\n score += 0.5\n elif a <= -3 and a > -6:\n score += 0.75\n elif a <= -6:\n score += 1\n a = float(price_chance_8_hour[x])\n if a <= 0 and a > -4:\n score += 0.25\n elif a <= -4 and a > -6:\n score += 0.5\n elif a <= -6:\n score += 0.75\n if float(ratio5[x]) > 0:\n score += 1\n a = 0\n for i in range(len(ratio5_10sec[x])):\n if float(price_chance_2_min[x]) > 0.55 or float(price_chance_5_min\n [x]) > 1:\n if float(ratio5_10sec[x][i]) > 0:\n a += 1\n if float(ratio5_sum_10sec[x][i]) > 0.3:\n a += 1\n score += a / len(ratio5_sum_10sec[x])\n if float(ratio20[x]) > 0:\n score += 1\n a = 0\n for i in range(len(ratio5_10sec[x]) - 1):\n if float(ratio5_10sec[x][i]) > 0:\n a += 1\n if a <= 2:\n score += 0.25\n elif a > 2 and a <= 4:\n score += 0.5\n elif a > 4 and a <= 7:\n score += 0.75\n elif a > 7:\n score += 1\n a = 0\n for i in range(20, 1, -1):\n if float(k_line_1m[x][-i]) > float(k_line_1m[x][-(i - 1)]):\n a += 1\n score += a / 10\n if float(price_change_1_days[x]) > 5:\n score += 0.3\n if float(price_change_3_days[x]) > 10:\n score += 0.25\n if float(price_change_5_days[x]) > 15:\n score += 0.25\n if float(price_change_7_days[x]) > 20:\n score += 0.25\n if float(price_change_10_days[x]) > -25:\n score += 0.25\n a = float(average_change_10_min[x])\n if a < 0.2 and a > -0.3:\n score += 0.1\n a = float(average_change_20_min[x])\n if a < 0.2 and a > -0.3:\n score += 0.1\n a = float(average_change_50_min[x])\n if a < 0.2 and a > -0.3:\n score += 0.1\n a = float(average_change_100_min[x])\n if a < 0.2 and a > -0.3:\n score += 0.1\n total_score[x] = score\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef calculate_data_list():\n counter = 0\n btc = 'BTC'\n symbols = []\n all_positions = []\n positions_final = []\n volume = []\n c = []\n price_change = []\n data = client.get_ticker()\n for x in range(len(data)):\n if btc in data[x]['symbol'] and data[x]['symbol'\n ] != 'BTCUSDT' and data[x]['symbol'] != 'VENBTC':\n if float(data[x]['quoteVolume']) > 100:\n all_positions.append(x)\n for x in all_positions:\n c.append(float(data[x]['priceChangePercent']))\n i = sorted(range(len(c)), key=lambda k: c[k])\n i.reverse()\n while len(positions_final) < 20 and len(positions_final) < len(\n all_positions):\n symbols.append(data[all_positions[i[counter]]]['symbol'])\n positions_final.append(all_positions[i[counter]])\n volume.append(data[all_positions[i[counter]]]['quoteVolume'])\n price_change.append(data[all_positions[i[counter]]][\n 'priceChangePercent'])\n counter += 1\n return symbols, volume, positions_final, price_change\n\n\ndef get_kline():\n symbols, volume, pozitii, price_change = calculate_data_list()\n prices = []\n prices1 = []\n k = []\n for x in symbols:\n try:\n order = client.get_klines(symbol=x, interval='1m')\n except BinanceAPIException as e:\n print(e.status_code)\n print(e.message)\n try:\n order1 = client.get_klines(symbol=x, limit=1000, interval='15m')\n except BinanceAPIException as e:\n print(e.status_code)\n print(e.message)\n if len(order1) < 970:\n a = symbols.index(x)\n k.append(a)\n else:\n prices.append([])\n prices1.append([])\n for i in range(len(order)):\n prices[-1].append(float(order[i][1]))\n for i in range(len(order1)):\n prices1[-1].append(float(order1[i][1]))\n k.reverse()\n for x in k:\n symbols.pop(x)\n volume.pop(x)\n all_positions.pop(x)\n price_change.pop(x)\n return symbols, volume, pozitii, prices, prices1, price_change\n\n\ndef process_depth(msg):\n sums5 = 0\n sumb5 = 0\n m = -1\n for x in range(5):\n if float(msg['data']['bids'][x][1]) > m:\n m = float(msg['data']['bids'][x][1])\n sums5 = sums5 + float(msg['data']['bids'][x][1])\n sumb5 = sumb5 + float(msg['data']['asks'][x][1])\n ratio1 = sums5 / sumb5\n if ratio1 < 1:\n ratio1 = 1 / ratio1 * -1 + 1\n else:\n ratio1 -= 1\n sums20 = 0\n sumb20 = 0\n ratio2 = 0\n try:\n for x in range(17):\n sums20 = sums20 + float(msg['data']['bids'][x][1])\n sumb20 = sumb20 + float(msg['data']['asks'][x][1])\n ratio2 = sums20 / sumb20\n if ratio2 < 1:\n ratio2 = 1 / ratio2 * -1 + 1\n else:\n ratio2 -= 1\n except Exception as e:\n print('')\n for i in range(len(symbols)):\n simbol = symbols[i].lower() + '@depth20'\n if simbol == msg['stream']:\n ratio5[i] = round(ratio1, 2)\n ratio20[i] = round(ratio2, 2)\n max_order5[i] = m\n ratio5_sum[i] = round(float(sums5) * float(current_price[i]) * \n 100 / float(volume[i]), 2)\n current_price[i] = float(msg['data']['bids'][0][0])\n\n\ndef process_ticker(msg):\n i = 0\n for x in symbols:\n for y in range(len(msg)):\n if x == str(msg[y]['s']):\n volume[i] = int(float(msg[y]['q']))\n price_change[i] = int(float(msg[y]['P']))\n i += 1\n\n\n<mask token>\n\n\ndef kline_continuum():\n i = 0\n while True:\n time.sleep(60)\n for x in range(len(symbols)):\n k_line_1m[x].pop(0)\n k_line_1m[x].append(current_price[x])\n if i % 15 == 0:\n k_line_15m[x].pop(0)\n k_line_15m[x].append(current_price[x])\n i += 1\n\n\ndef report_10_seconds():\n while True:\n for x in range(len(symbols)):\n if len(ratio5_10sec[x]) > 10:\n ratio5_10sec[x].pop(0)\n if len(ratio5_sum_10sec[x]) > 10:\n ratio5_sum_10sec[x].pop(0)\n ratio5_10sec[x].append(ratio5[x])\n 
ratio5_sum_10sec[x].append(ratio5_sum[x])\n time.sleep(1)\n\n\ndef calculate_score():\n for x in range(len(symbols)):\n score = 0\n a = float(price_chance_2_min[x])\n if a > 0 and a < 0.5:\n score += 1\n elif a >= 0.5 and a < 1:\n score += 1.25\n elif a >= 1 and a < 1.5:\n score += 1.5\n elif a >= 1.5 and a < 2:\n score += 0.5\n elif a >= 3:\n score += 0.25\n a = float(price_chance_5_min[x])\n if a > 0 and a < 0.5:\n score += 1\n elif a >= 0.5 and a < 1:\n score += 1.25\n elif a >= 1 and a < 2:\n score += 1.5\n elif a >= 2 and a < 3:\n score += 0.5\n elif a >= 3:\n score += 0.25\n a = float(price_chance_15_min[x])\n if a <= 1 and a > -0.5:\n score += 0.25\n elif a <= -0.5 and a > -1:\n score += 0.5\n elif a <= -1 and a > -1.5:\n score += 0.75\n elif a <= -1.5:\n score += 1\n a = float(price_change_25_30_min[x])\n if a <= 2 and a > -0.75:\n score += 0.25\n elif a <= -0.75 and a > -1.25:\n score += 0.5\n elif a <= -1.25 and a > -1.75:\n score += 0.75\n elif a <= -1.75:\n score += 1\n a = float(price_chance_1_hour[x])\n if a <= 2 and a >= 0:\n score += 0.5\n elif a <= 0 and a > -2:\n score += 0.75\n elif a <= -2:\n score += 1\n a = float(price_chance_3_hour[x])\n if a <= 5 and a > -1:\n score += 0.25\n elif a <= -1 and a > -3:\n score += 0.5\n elif a <= -3 and a > -6:\n score += 0.75\n elif a <= -6:\n score += 1\n a = float(price_chance_8_hour[x])\n if a <= 0 and a > -4:\n score += 0.25\n elif a <= -4 and a > -6:\n score += 0.5\n elif a <= -6:\n score += 0.75\n if float(ratio5[x]) > 0:\n score += 1\n a = 0\n for i in range(len(ratio5_10sec[x])):\n if float(price_chance_2_min[x]) > 0.55 or float(price_chance_5_min\n [x]) > 1:\n if float(ratio5_10sec[x][i]) > 0:\n a += 1\n if float(ratio5_sum_10sec[x][i]) > 0.3:\n a += 1\n score += a / len(ratio5_sum_10sec[x])\n if float(ratio20[x]) > 0:\n score += 1\n a = 0\n for i in range(len(ratio5_10sec[x]) - 1):\n if float(ratio5_10sec[x][i]) > 0:\n a += 1\n if a <= 2:\n score += 0.25\n elif a > 2 and a <= 4:\n score += 0.5\n elif a > 4 and a <= 7:\n score += 0.75\n elif a > 7:\n score += 1\n a = 0\n for i in range(20, 1, -1):\n if float(k_line_1m[x][-i]) > float(k_line_1m[x][-(i - 1)]):\n a += 1\n score += a / 10\n if float(price_change_1_days[x]) > 5:\n score += 0.3\n if float(price_change_3_days[x]) > 10:\n score += 0.25\n if float(price_change_5_days[x]) > 15:\n score += 0.25\n if float(price_change_7_days[x]) > 20:\n score += 0.25\n if float(price_change_10_days[x]) > -25:\n score += 0.25\n a = float(average_change_10_min[x])\n if a < 0.2 and a > -0.3:\n score += 0.1\n a = float(average_change_20_min[x])\n if a < 0.2 and a > -0.3:\n score += 0.1\n a = float(average_change_50_min[x])\n if a < 0.2 and a > -0.3:\n score += 0.1\n a = float(average_change_100_min[x])\n if a < 0.2 and a > -0.3:\n score += 0.1\n total_score[x] = score\n\n\ndef print_results():\n time.sleep(10)\n while True:\n for x in range(len(symbols)):\n try:\n price_chance_2_min[x] = round(float(current_price[x]) * 100 /\n float(k_line_1m[x][-2]) - 100, 2)\n price_chance_5_min[x] = round(float(current_price[x]) * 100 /\n float(k_line_1m[x][-5]) - 100, 2)\n price_chance_15_min[x] = round(float(current_price[x]) * \n 100 / float(k_line_1m[x][-15]) - 100, 2)\n price_chance_30_min[x] = round(float(current_price[x]) * \n 100 / float(k_line_1m[x][-30]) - 100, 2)\n price_chance_1_hour[x] = round(float(current_price[x]) * \n 100 / float(k_line_1m[x][-60]) - 100, 2)\n price_chance_3_hour[x] = round(float(current_price[x]) * \n 100 / float(k_line_1m[x][-180]) - 100, 2)\n price_chance_8_hour[x] = 
round(float(current_price[x]) * \n 100 / float(k_line_1m[x][20]) - 100, 2)\n price_change_25_30_min[x] = round(float(k_line_1m[x][-6]) *\n 100 / float(k_line_1m[x][-30]) - 100, 2)\n price_change_1_days[x] = round(float(current_price[x]) * \n 100 / float(k_line_15m[x][-96]) - 100, 1)\n price_change_3_days[x] = round(float(current_price[x]) * \n 100 / float(k_line_15m[x][-288]) - 100, 1)\n price_change_5_days[x] = round(float(current_price[x]) * \n 100 / float(k_line_15m[x][-480]) - 100, 1)\n price_change_7_days[x] = round(float(current_price[x]) * \n 100 / float(k_line_15m[x][-672]) - 100, 1)\n price_change_10_days[x] = round(float(current_price[x]) * \n 100 / float(k_line_15m[x][-960]) - 100, 1)\n average_10_min[x] = round(float(sum(k_line_1m[x][-10:])) / \n 10, 8)\n average_20_min[x] = round(float(sum(k_line_1m[x][-20:])) / \n 20, 8)\n average_50_min[x] = round(float(sum(k_line_1m[x][-50:])) / \n 50, 8)\n average_100_min[x] = round(float(sum(k_line_1m[x][-100:])) /\n 100, 8)\n average_change_10_min[x] = round(float(current_price[x]) * \n 100 / float(average_10_min[x]) - 100, 2)\n average_change_20_min[x] = round(float(current_price[x]) * \n 100 / float(average_20_min[x]) - 100, 2)\n average_change_50_min[x] = round(float(current_price[x]) * \n 100 / float(average_50_min[x]) - 100, 2)\n average_change_100_min[x] = round(float(current_price[x]) *\n 100 / float(average_100_min[x]) - 100, 2)\n except Exception as e:\n print(e)\n calculate_score()\n sort_by = total_score\n sorted_data = sorted(range(len(sort_by)), key=lambda k: sort_by[k])\n sorted_data.reverse()\n print(time.ctime())\n print(\n '%5s %5s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %5s %6s %6s %6s %6s %6s'\n % ('Symbol', 'score', 'r5', 'r20', '2m_ch', '5m_ch', '15m_ch',\n '30m_ch', '1h_ch', '10MA', '20MA', '50MA', '100MA', '8h_ch',\n '25-30m', 'r5sum', '1d_ch', '3d_ch', '5d_ch', '7d_ch', '10d_ch'))\n for k in range(10):\n i = sorted_data[k]\n print(\n '%5s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %5s %6s %6s %6s %6s %6s'\n % (symbols[i][:-3], total_score[i], ratio5[i], ratio20[i],\n price_chance_2_min[i], price_chance_5_min[i],\n price_chance_15_min[i], price_chance_30_min[i],\n price_chance_1_hour[i], average_change_10_min[i],\n average_change_20_min[i], average_change_50_min[i],\n average_change_100_min[i], price_chance_8_hour[i],\n price_change_25_30_min[i], ratio5_sum[i],\n price_change_1_days[i], price_change_3_days[i],\n price_change_5_days[i], price_change_7_days[i],\n price_change_10_days[i]))\n try:\n if float(total_score[sorted_data[0]]) > 10:\n winsound.PlaySound('\\\\Sound.wav', winsound.SND_FILENAME)\n except Exception as e:\n print(e)\n time.sleep(1)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef calculate_data_list():\n counter = 0\n btc = 'BTC'\n symbols = []\n all_positions = []\n positions_final = []\n volume = []\n c = []\n price_change = []\n data = client.get_ticker()\n for x in range(len(data)):\n if btc in data[x]['symbol'] and data[x]['symbol'\n ] != 'BTCUSDT' and data[x]['symbol'] != 'VENBTC':\n if float(data[x]['quoteVolume']) > 100:\n all_positions.append(x)\n for x in all_positions:\n c.append(float(data[x]['priceChangePercent']))\n i = sorted(range(len(c)), key=lambda k: c[k])\n i.reverse()\n while len(positions_final) < 20 and len(positions_final) < len(\n all_positions):\n symbols.append(data[all_positions[i[counter]]]['symbol'])\n positions_final.append(all_positions[i[counter]])\n volume.append(data[all_positions[i[counter]]]['quoteVolume'])\n price_change.append(data[all_positions[i[counter]]][\n 'priceChangePercent'])\n counter += 1\n return symbols, volume, positions_final, price_change\n\n\ndef get_kline():\n symbols, volume, pozitii, price_change = calculate_data_list()\n prices = []\n prices1 = []\n k = []\n for x in symbols:\n try:\n order = client.get_klines(symbol=x, interval='1m')\n except BinanceAPIException as e:\n print(e.status_code)\n print(e.message)\n try:\n order1 = client.get_klines(symbol=x, limit=1000, interval='15m')\n except BinanceAPIException as e:\n print(e.status_code)\n print(e.message)\n if len(order1) < 970:\n a = symbols.index(x)\n k.append(a)\n else:\n prices.append([])\n prices1.append([])\n for i in range(len(order)):\n prices[-1].append(float(order[i][1]))\n for i in range(len(order1)):\n prices1[-1].append(float(order1[i][1]))\n k.reverse()\n for x in k:\n symbols.pop(x)\n volume.pop(x)\n all_positions.pop(x)\n price_change.pop(x)\n return symbols, volume, pozitii, prices, prices1, price_change\n\n\ndef process_depth(msg):\n sums5 = 0\n sumb5 = 0\n m = -1\n for x in range(5):\n if float(msg['data']['bids'][x][1]) > m:\n m = float(msg['data']['bids'][x][1])\n sums5 = sums5 + float(msg['data']['bids'][x][1])\n sumb5 = sumb5 + float(msg['data']['asks'][x][1])\n ratio1 = sums5 / sumb5\n if ratio1 < 1:\n ratio1 = 1 / ratio1 * -1 + 1\n else:\n ratio1 -= 1\n sums20 = 0\n sumb20 = 0\n ratio2 = 0\n try:\n for x in range(17):\n sums20 = sums20 + float(msg['data']['bids'][x][1])\n sumb20 = sumb20 + float(msg['data']['asks'][x][1])\n ratio2 = sums20 / sumb20\n if ratio2 < 1:\n ratio2 = 1 / ratio2 * -1 + 1\n else:\n ratio2 -= 1\n except Exception as e:\n print('')\n for i in range(len(symbols)):\n simbol = symbols[i].lower() + '@depth20'\n if simbol == msg['stream']:\n ratio5[i] = round(ratio1, 2)\n ratio20[i] = round(ratio2, 2)\n max_order5[i] = m\n ratio5_sum[i] = round(float(sums5) * float(current_price[i]) * \n 100 / float(volume[i]), 2)\n current_price[i] = float(msg['data']['bids'][0][0])\n\n\ndef process_ticker(msg):\n i = 0\n for x in symbols:\n for y in range(len(msg)):\n if x == str(msg[y]['s']):\n volume[i] = int(float(msg[y]['q']))\n price_change[i] = int(float(msg[y]['P']))\n i += 1\n\n\n<mask token>\nfor x in symbols:\n list.append(x.lower() + '@depth20')\n<mask token>\nbm.start()\n<mask token>\n\n\ndef kline_continuum():\n i = 0\n while True:\n time.sleep(60)\n for x in range(len(symbols)):\n k_line_1m[x].pop(0)\n k_line_1m[x].append(current_price[x])\n if i % 15 == 0:\n k_line_15m[x].pop(0)\n k_line_15m[x].append(current_price[x])\n i += 1\n\n\ndef report_10_seconds():\n while True:\n for x in range(len(symbols)):\n if len(ratio5_10sec[x]) > 10:\n ratio5_10sec[x].pop(0)\n if 
len(ratio5_sum_10sec[x]) > 10:\n ratio5_sum_10sec[x].pop(0)\n ratio5_10sec[x].append(ratio5[x])\n ratio5_sum_10sec[x].append(ratio5_sum[x])\n time.sleep(1)\n\n\ndef calculate_score():\n for x in range(len(symbols)):\n score = 0\n a = float(price_chance_2_min[x])\n if a > 0 and a < 0.5:\n score += 1\n elif a >= 0.5 and a < 1:\n score += 1.25\n elif a >= 1 and a < 1.5:\n score += 1.5\n elif a >= 1.5 and a < 2:\n score += 0.5\n elif a >= 3:\n score += 0.25\n a = float(price_chance_5_min[x])\n if a > 0 and a < 0.5:\n score += 1\n elif a >= 0.5 and a < 1:\n score += 1.25\n elif a >= 1 and a < 2:\n score += 1.5\n elif a >= 2 and a < 3:\n score += 0.5\n elif a >= 3:\n score += 0.25\n a = float(price_chance_15_min[x])\n if a <= 1 and a > -0.5:\n score += 0.25\n elif a <= -0.5 and a > -1:\n score += 0.5\n elif a <= -1 and a > -1.5:\n score += 0.75\n elif a <= -1.5:\n score += 1\n a = float(price_change_25_30_min[x])\n if a <= 2 and a > -0.75:\n score += 0.25\n elif a <= -0.75 and a > -1.25:\n score += 0.5\n elif a <= -1.25 and a > -1.75:\n score += 0.75\n elif a <= -1.75:\n score += 1\n a = float(price_chance_1_hour[x])\n if a <= 2 and a >= 0:\n score += 0.5\n elif a <= 0 and a > -2:\n score += 0.75\n elif a <= -2:\n score += 1\n a = float(price_chance_3_hour[x])\n if a <= 5 and a > -1:\n score += 0.25\n elif a <= -1 and a > -3:\n score += 0.5\n elif a <= -3 and a > -6:\n score += 0.75\n elif a <= -6:\n score += 1\n a = float(price_chance_8_hour[x])\n if a <= 0 and a > -4:\n score += 0.25\n elif a <= -4 and a > -6:\n score += 0.5\n elif a <= -6:\n score += 0.75\n if float(ratio5[x]) > 0:\n score += 1\n a = 0\n for i in range(len(ratio5_10sec[x])):\n if float(price_chance_2_min[x]) > 0.55 or float(price_chance_5_min\n [x]) > 1:\n if float(ratio5_10sec[x][i]) > 0:\n a += 1\n if float(ratio5_sum_10sec[x][i]) > 0.3:\n a += 1\n score += a / len(ratio5_sum_10sec[x])\n if float(ratio20[x]) > 0:\n score += 1\n a = 0\n for i in range(len(ratio5_10sec[x]) - 1):\n if float(ratio5_10sec[x][i]) > 0:\n a += 1\n if a <= 2:\n score += 0.25\n elif a > 2 and a <= 4:\n score += 0.5\n elif a > 4 and a <= 7:\n score += 0.75\n elif a > 7:\n score += 1\n a = 0\n for i in range(20, 1, -1):\n if float(k_line_1m[x][-i]) > float(k_line_1m[x][-(i - 1)]):\n a += 1\n score += a / 10\n if float(price_change_1_days[x]) > 5:\n score += 0.3\n if float(price_change_3_days[x]) > 10:\n score += 0.25\n if float(price_change_5_days[x]) > 15:\n score += 0.25\n if float(price_change_7_days[x]) > 20:\n score += 0.25\n if float(price_change_10_days[x]) > -25:\n score += 0.25\n a = float(average_change_10_min[x])\n if a < 0.2 and a > -0.3:\n score += 0.1\n a = float(average_change_20_min[x])\n if a < 0.2 and a > -0.3:\n score += 0.1\n a = float(average_change_50_min[x])\n if a < 0.2 and a > -0.3:\n score += 0.1\n a = float(average_change_100_min[x])\n if a < 0.2 and a > -0.3:\n score += 0.1\n total_score[x] = score\n\n\ndef print_results():\n time.sleep(10)\n while True:\n for x in range(len(symbols)):\n try:\n price_chance_2_min[x] = round(float(current_price[x]) * 100 /\n float(k_line_1m[x][-2]) - 100, 2)\n price_chance_5_min[x] = round(float(current_price[x]) * 100 /\n float(k_line_1m[x][-5]) - 100, 2)\n price_chance_15_min[x] = round(float(current_price[x]) * \n 100 / float(k_line_1m[x][-15]) - 100, 2)\n price_chance_30_min[x] = round(float(current_price[x]) * \n 100 / float(k_line_1m[x][-30]) - 100, 2)\n price_chance_1_hour[x] = round(float(current_price[x]) * \n 100 / float(k_line_1m[x][-60]) - 100, 2)\n price_chance_3_hour[x] = 
round(float(current_price[x]) * \n 100 / float(k_line_1m[x][-180]) - 100, 2)\n price_chance_8_hour[x] = round(float(current_price[x]) * \n 100 / float(k_line_1m[x][20]) - 100, 2)\n price_change_25_30_min[x] = round(float(k_line_1m[x][-6]) *\n 100 / float(k_line_1m[x][-30]) - 100, 2)\n price_change_1_days[x] = round(float(current_price[x]) * \n 100 / float(k_line_15m[x][-96]) - 100, 1)\n price_change_3_days[x] = round(float(current_price[x]) * \n 100 / float(k_line_15m[x][-288]) - 100, 1)\n price_change_5_days[x] = round(float(current_price[x]) * \n 100 / float(k_line_15m[x][-480]) - 100, 1)\n price_change_7_days[x] = round(float(current_price[x]) * \n 100 / float(k_line_15m[x][-672]) - 100, 1)\n price_change_10_days[x] = round(float(current_price[x]) * \n 100 / float(k_line_15m[x][-960]) - 100, 1)\n average_10_min[x] = round(float(sum(k_line_1m[x][-10:])) / \n 10, 8)\n average_20_min[x] = round(float(sum(k_line_1m[x][-20:])) / \n 20, 8)\n average_50_min[x] = round(float(sum(k_line_1m[x][-50:])) / \n 50, 8)\n average_100_min[x] = round(float(sum(k_line_1m[x][-100:])) /\n 100, 8)\n average_change_10_min[x] = round(float(current_price[x]) * \n 100 / float(average_10_min[x]) - 100, 2)\n average_change_20_min[x] = round(float(current_price[x]) * \n 100 / float(average_20_min[x]) - 100, 2)\n average_change_50_min[x] = round(float(current_price[x]) * \n 100 / float(average_50_min[x]) - 100, 2)\n average_change_100_min[x] = round(float(current_price[x]) *\n 100 / float(average_100_min[x]) - 100, 2)\n except Exception as e:\n print(e)\n calculate_score()\n sort_by = total_score\n sorted_data = sorted(range(len(sort_by)), key=lambda k: sort_by[k])\n sorted_data.reverse()\n print(time.ctime())\n print(\n '%5s %5s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %5s %6s %6s %6s %6s %6s'\n % ('Symbol', 'score', 'r5', 'r20', '2m_ch', '5m_ch', '15m_ch',\n '30m_ch', '1h_ch', '10MA', '20MA', '50MA', '100MA', '8h_ch',\n '25-30m', 'r5sum', '1d_ch', '3d_ch', '5d_ch', '7d_ch', '10d_ch'))\n for k in range(10):\n i = sorted_data[k]\n print(\n '%5s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %5s %6s %6s %6s %6s %6s'\n % (symbols[i][:-3], total_score[i], ratio5[i], ratio20[i],\n price_chance_2_min[i], price_chance_5_min[i],\n price_chance_15_min[i], price_chance_30_min[i],\n price_chance_1_hour[i], average_change_10_min[i],\n average_change_20_min[i], average_change_50_min[i],\n average_change_100_min[i], price_chance_8_hour[i],\n price_change_25_30_min[i], ratio5_sum[i],\n price_change_1_days[i], price_change_3_days[i],\n price_change_5_days[i], price_change_7_days[i],\n price_change_10_days[i]))\n try:\n if float(total_score[sorted_data[0]]) > 10:\n winsound.PlaySound('\\\\Sound.wav', winsound.SND_FILENAME)\n except Exception as e:\n print(e)\n time.sleep(1)\n\n\n<mask token>\n[thread.start() for thread in threads]\n[thread.join() for thread in threads]\n",
"step-4": "from binance.client import Client\nfrom binance.websockets import BinanceSocketManager\nfrom binance.enums import *\nimport time\nimport threading\nimport winsound\nclient = Client(your_api_key, your_api_secret)\n\n\ndef calculate_data_list():\n counter = 0\n btc = 'BTC'\n symbols = []\n all_positions = []\n positions_final = []\n volume = []\n c = []\n price_change = []\n data = client.get_ticker()\n for x in range(len(data)):\n if btc in data[x]['symbol'] and data[x]['symbol'\n ] != 'BTCUSDT' and data[x]['symbol'] != 'VENBTC':\n if float(data[x]['quoteVolume']) > 100:\n all_positions.append(x)\n for x in all_positions:\n c.append(float(data[x]['priceChangePercent']))\n i = sorted(range(len(c)), key=lambda k: c[k])\n i.reverse()\n while len(positions_final) < 20 and len(positions_final) < len(\n all_positions):\n symbols.append(data[all_positions[i[counter]]]['symbol'])\n positions_final.append(all_positions[i[counter]])\n volume.append(data[all_positions[i[counter]]]['quoteVolume'])\n price_change.append(data[all_positions[i[counter]]][\n 'priceChangePercent'])\n counter += 1\n return symbols, volume, positions_final, price_change\n\n\ndef get_kline():\n symbols, volume, pozitii, price_change = calculate_data_list()\n prices = []\n prices1 = []\n k = []\n for x in symbols:\n try:\n order = client.get_klines(symbol=x, interval='1m')\n except BinanceAPIException as e:\n print(e.status_code)\n print(e.message)\n try:\n order1 = client.get_klines(symbol=x, limit=1000, interval='15m')\n except BinanceAPIException as e:\n print(e.status_code)\n print(e.message)\n if len(order1) < 970:\n a = symbols.index(x)\n k.append(a)\n else:\n prices.append([])\n prices1.append([])\n for i in range(len(order)):\n prices[-1].append(float(order[i][1]))\n for i in range(len(order1)):\n prices1[-1].append(float(order1[i][1]))\n k.reverse()\n for x in k:\n symbols.pop(x)\n volume.pop(x)\n all_positions.pop(x)\n price_change.pop(x)\n return symbols, volume, pozitii, prices, prices1, price_change\n\n\ndef process_depth(msg):\n sums5 = 0\n sumb5 = 0\n m = -1\n for x in range(5):\n if float(msg['data']['bids'][x][1]) > m:\n m = float(msg['data']['bids'][x][1])\n sums5 = sums5 + float(msg['data']['bids'][x][1])\n sumb5 = sumb5 + float(msg['data']['asks'][x][1])\n ratio1 = sums5 / sumb5\n if ratio1 < 1:\n ratio1 = 1 / ratio1 * -1 + 1\n else:\n ratio1 -= 1\n sums20 = 0\n sumb20 = 0\n ratio2 = 0\n try:\n for x in range(17):\n sums20 = sums20 + float(msg['data']['bids'][x][1])\n sumb20 = sumb20 + float(msg['data']['asks'][x][1])\n ratio2 = sums20 / sumb20\n if ratio2 < 1:\n ratio2 = 1 / ratio2 * -1 + 1\n else:\n ratio2 -= 1\n except Exception as e:\n print('')\n for i in range(len(symbols)):\n simbol = symbols[i].lower() + '@depth20'\n if simbol == msg['stream']:\n ratio5[i] = round(ratio1, 2)\n ratio20[i] = round(ratio2, 2)\n max_order5[i] = m\n ratio5_sum[i] = round(float(sums5) * float(current_price[i]) * \n 100 / float(volume[i]), 2)\n current_price[i] = float(msg['data']['bids'][0][0])\n\n\ndef process_ticker(msg):\n i = 0\n for x in symbols:\n for y in range(len(msg)):\n if x == str(msg[y]['s']):\n volume[i] = int(float(msg[y]['q']))\n price_change[i] = int(float(msg[y]['P']))\n i += 1\n\n\nsymbols, volume, pozitii, k_line_1m, k_line_15m, price_change = get_kline()\nmax_order5 = [(0) for x in range(len(symbols))]\ncurrent_price = [(0) for x in range(len(symbols))]\nprice_chance_2_min = [(0) for x in range(len(symbols))]\nprice_chance_5_min = [(0) for x in range(len(symbols))]\nprice_chance_15_min = [(0) 
for x in range(len(symbols))]\nprice_chance_30_min = [(0) for x in range(len(symbols))]\nprice_change_25_30_min = [(0) for x in range(len(symbols))]\nprice_chance_1_hour = [(0) for x in range(len(symbols))]\nprice_chance_3_hour = [(0) for x in range(len(symbols))]\nprice_chance_8_hour = [(0) for x in range(len(symbols))]\nprice_change_1_days = [(0) for x in range(len(symbols))]\nprice_change_3_days = [(0) for x in range(len(symbols))]\nprice_change_5_days = [(0) for x in range(len(symbols))]\nprice_change_7_days = [(0) for x in range(len(symbols))]\nprice_change_10_days = [(0) for x in range(len(symbols))]\naverage_10_min = [(0) for x in range(len(symbols))]\naverage_20_min = [(0) for x in range(len(symbols))]\naverage_50_min = [(0) for x in range(len(symbols))]\naverage_100_min = [(0) for x in range(len(symbols))]\naverage_change_10_min = [(0) for x in range(len(symbols))]\naverage_change_20_min = [(0) for x in range(len(symbols))]\naverage_change_50_min = [(0) for x in range(len(symbols))]\naverage_change_100_min = [(0) for x in range(len(symbols))]\ntotal_score = [(0) for x in range(len(symbols))]\nratio5 = [(0) for x in range(len(symbols))]\nratio5_10sec = [[] for y in range(len(symbols))]\nratio5_sum = [(0) for x in range(len(symbols))]\nratio5_sum_10sec = [[] for y in range(len(symbols))]\nratio20 = [(0) for x in range(len(symbols))]\nlist = []\nfor x in symbols:\n list.append(x.lower() + '@depth20')\nbm = BinanceSocketManager(client)\nbm.start()\ndepth_socket = bm.start_multiplex_socket(list, process_depth)\nticker_socket = bm.start_ticker_socket(process_ticker)\n\n\ndef kline_continuum():\n i = 0\n while True:\n time.sleep(60)\n for x in range(len(symbols)):\n k_line_1m[x].pop(0)\n k_line_1m[x].append(current_price[x])\n if i % 15 == 0:\n k_line_15m[x].pop(0)\n k_line_15m[x].append(current_price[x])\n i += 1\n\n\ndef report_10_seconds():\n while True:\n for x in range(len(symbols)):\n if len(ratio5_10sec[x]) > 10:\n ratio5_10sec[x].pop(0)\n if len(ratio5_sum_10sec[x]) > 10:\n ratio5_sum_10sec[x].pop(0)\n ratio5_10sec[x].append(ratio5[x])\n ratio5_sum_10sec[x].append(ratio5_sum[x])\n time.sleep(1)\n\n\ndef calculate_score():\n for x in range(len(symbols)):\n score = 0\n a = float(price_chance_2_min[x])\n if a > 0 and a < 0.5:\n score += 1\n elif a >= 0.5 and a < 1:\n score += 1.25\n elif a >= 1 and a < 1.5:\n score += 1.5\n elif a >= 1.5 and a < 2:\n score += 0.5\n elif a >= 3:\n score += 0.25\n a = float(price_chance_5_min[x])\n if a > 0 and a < 0.5:\n score += 1\n elif a >= 0.5 and a < 1:\n score += 1.25\n elif a >= 1 and a < 2:\n score += 1.5\n elif a >= 2 and a < 3:\n score += 0.5\n elif a >= 3:\n score += 0.25\n a = float(price_chance_15_min[x])\n if a <= 1 and a > -0.5:\n score += 0.25\n elif a <= -0.5 and a > -1:\n score += 0.5\n elif a <= -1 and a > -1.5:\n score += 0.75\n elif a <= -1.5:\n score += 1\n a = float(price_change_25_30_min[x])\n if a <= 2 and a > -0.75:\n score += 0.25\n elif a <= -0.75 and a > -1.25:\n score += 0.5\n elif a <= -1.25 and a > -1.75:\n score += 0.75\n elif a <= -1.75:\n score += 1\n a = float(price_chance_1_hour[x])\n if a <= 2 and a >= 0:\n score += 0.5\n elif a <= 0 and a > -2:\n score += 0.75\n elif a <= -2:\n score += 1\n a = float(price_chance_3_hour[x])\n if a <= 5 and a > -1:\n score += 0.25\n elif a <= -1 and a > -3:\n score += 0.5\n elif a <= -3 and a > -6:\n score += 0.75\n elif a <= -6:\n score += 1\n a = float(price_chance_8_hour[x])\n if a <= 0 and a > -4:\n score += 0.25\n elif a <= -4 and a > -6:\n score += 0.5\n elif a <= -6:\n 
score += 0.75\n if float(ratio5[x]) > 0:\n score += 1\n a = 0\n for i in range(len(ratio5_10sec[x])):\n if float(price_chance_2_min[x]) > 0.55 or float(price_chance_5_min\n [x]) > 1:\n if float(ratio5_10sec[x][i]) > 0:\n a += 1\n if float(ratio5_sum_10sec[x][i]) > 0.3:\n a += 1\n score += a / len(ratio5_sum_10sec[x])\n if float(ratio20[x]) > 0:\n score += 1\n a = 0\n for i in range(len(ratio5_10sec[x]) - 1):\n if float(ratio5_10sec[x][i]) > 0:\n a += 1\n if a <= 2:\n score += 0.25\n elif a > 2 and a <= 4:\n score += 0.5\n elif a > 4 and a <= 7:\n score += 0.75\n elif a > 7:\n score += 1\n a = 0\n for i in range(20, 1, -1):\n if float(k_line_1m[x][-i]) > float(k_line_1m[x][-(i - 1)]):\n a += 1\n score += a / 10\n if float(price_change_1_days[x]) > 5:\n score += 0.3\n if float(price_change_3_days[x]) > 10:\n score += 0.25\n if float(price_change_5_days[x]) > 15:\n score += 0.25\n if float(price_change_7_days[x]) > 20:\n score += 0.25\n if float(price_change_10_days[x]) > -25:\n score += 0.25\n a = float(average_change_10_min[x])\n if a < 0.2 and a > -0.3:\n score += 0.1\n a = float(average_change_20_min[x])\n if a < 0.2 and a > -0.3:\n score += 0.1\n a = float(average_change_50_min[x])\n if a < 0.2 and a > -0.3:\n score += 0.1\n a = float(average_change_100_min[x])\n if a < 0.2 and a > -0.3:\n score += 0.1\n total_score[x] = score\n\n\ndef print_results():\n time.sleep(10)\n while True:\n for x in range(len(symbols)):\n try:\n price_chance_2_min[x] = round(float(current_price[x]) * 100 /\n float(k_line_1m[x][-2]) - 100, 2)\n price_chance_5_min[x] = round(float(current_price[x]) * 100 /\n float(k_line_1m[x][-5]) - 100, 2)\n price_chance_15_min[x] = round(float(current_price[x]) * \n 100 / float(k_line_1m[x][-15]) - 100, 2)\n price_chance_30_min[x] = round(float(current_price[x]) * \n 100 / float(k_line_1m[x][-30]) - 100, 2)\n price_chance_1_hour[x] = round(float(current_price[x]) * \n 100 / float(k_line_1m[x][-60]) - 100, 2)\n price_chance_3_hour[x] = round(float(current_price[x]) * \n 100 / float(k_line_1m[x][-180]) - 100, 2)\n price_chance_8_hour[x] = round(float(current_price[x]) * \n 100 / float(k_line_1m[x][20]) - 100, 2)\n price_change_25_30_min[x] = round(float(k_line_1m[x][-6]) *\n 100 / float(k_line_1m[x][-30]) - 100, 2)\n price_change_1_days[x] = round(float(current_price[x]) * \n 100 / float(k_line_15m[x][-96]) - 100, 1)\n price_change_3_days[x] = round(float(current_price[x]) * \n 100 / float(k_line_15m[x][-288]) - 100, 1)\n price_change_5_days[x] = round(float(current_price[x]) * \n 100 / float(k_line_15m[x][-480]) - 100, 1)\n price_change_7_days[x] = round(float(current_price[x]) * \n 100 / float(k_line_15m[x][-672]) - 100, 1)\n price_change_10_days[x] = round(float(current_price[x]) * \n 100 / float(k_line_15m[x][-960]) - 100, 1)\n average_10_min[x] = round(float(sum(k_line_1m[x][-10:])) / \n 10, 8)\n average_20_min[x] = round(float(sum(k_line_1m[x][-20:])) / \n 20, 8)\n average_50_min[x] = round(float(sum(k_line_1m[x][-50:])) / \n 50, 8)\n average_100_min[x] = round(float(sum(k_line_1m[x][-100:])) /\n 100, 8)\n average_change_10_min[x] = round(float(current_price[x]) * \n 100 / float(average_10_min[x]) - 100, 2)\n average_change_20_min[x] = round(float(current_price[x]) * \n 100 / float(average_20_min[x]) - 100, 2)\n average_change_50_min[x] = round(float(current_price[x]) * \n 100 / float(average_50_min[x]) - 100, 2)\n average_change_100_min[x] = round(float(current_price[x]) *\n 100 / float(average_100_min[x]) - 100, 2)\n except Exception as e:\n print(e)\n 
calculate_score()\n sort_by = total_score\n sorted_data = sorted(range(len(sort_by)), key=lambda k: sort_by[k])\n sorted_data.reverse()\n print(time.ctime())\n print(\n '%5s %5s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %5s %6s %6s %6s %6s %6s'\n % ('Symbol', 'score', 'r5', 'r20', '2m_ch', '5m_ch', '15m_ch',\n '30m_ch', '1h_ch', '10MA', '20MA', '50MA', '100MA', '8h_ch',\n '25-30m', 'r5sum', '1d_ch', '3d_ch', '5d_ch', '7d_ch', '10d_ch'))\n for k in range(10):\n i = sorted_data[k]\n print(\n '%5s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %5s %6s %6s %6s %6s %6s'\n % (symbols[i][:-3], total_score[i], ratio5[i], ratio20[i],\n price_chance_2_min[i], price_chance_5_min[i],\n price_chance_15_min[i], price_chance_30_min[i],\n price_chance_1_hour[i], average_change_10_min[i],\n average_change_20_min[i], average_change_50_min[i],\n average_change_100_min[i], price_chance_8_hour[i],\n price_change_25_30_min[i], ratio5_sum[i],\n price_change_1_days[i], price_change_3_days[i],\n price_change_5_days[i], price_change_7_days[i],\n price_change_10_days[i]))\n try:\n if float(total_score[sorted_data[0]]) > 10:\n winsound.PlaySound('\\\\Sound.wav', winsound.SND_FILENAME)\n except Exception as e:\n print(e)\n time.sleep(1)\n\n\nthreads = [threading.Thread(target=kline_continuum), threading.Thread(\n target=report_10_seconds), threading.Thread(target=print_results)]\n[thread.start() for thread in threads]\n[thread.join() for thread in threads]\n",
"step-5": "from binance.client import Client\nfrom binance.websockets import BinanceSocketManager\nfrom binance.enums import *\nimport time\nimport threading\nimport winsound\n\n# Replace your_api_key, your_api_secret with your api_key, api_secret\nclient = Client(your_api_key, your_api_secret)\n\n\n# Calculate list of symbols\ndef calculate_data_list():\n counter=0\n btc='BTC'\n symbols=[]\n all_positions=[]\n positions_final=[]\n volume=[]\n c=[]\n price_change = []\n data=client.get_ticker()\n for x in range(len(data)):\n if (btc in data[x]['symbol']) and data[x]['symbol'] != 'BTCUSDT'and data[x]['symbol'] != 'VENBTC':\n if float(data[x]['quoteVolume'])>100:\n all_positions.append(x)\n for x in all_positions:\n c.append(float(data[x]['priceChangePercent']))\n i = sorted(range(len(c)), key=lambda k: c[k])\n i.reverse()\n while (len(positions_final) < 20 and len(positions_final) < len(all_positions)):\n symbols.append(data[all_positions[i[counter]]]['symbol'])\n positions_final.append(all_positions[i[counter]])\n volume.append(data[all_positions[i[counter]]]['quoteVolume'])\n price_change.append(data[all_positions[i[counter]]]['priceChangePercent'])\n counter += 1\n return symbols, volume, positions_final, price_change\n\n\n# Get candlestick data from Binance\ndef get_kline():\n symbols, volume, pozitii,price_change = calculate_data_list()\n prices = []\n prices1 = []\n k=[]\n\n for x in symbols:\n try:\n order = client.get_klines( # Get 1 minute candlestick data from server\n symbol=x,\n interval='1m')\n except BinanceAPIException as e:\n print (e.status_code)\n print (e.message)\n try:\n order1 = client.get_klines( # Get 15 minute candlestick data from server\n symbol=x,\n limit= 1000,\n interval='15m')\n except BinanceAPIException as e:\n print (e.status_code)\n print (e.message)\n\n if len(order1) < 970: # check if coin have at least 10 days of data\n a = symbols.index(x) # get index of x in symbols\n k.append(a)\n else:\n prices.append([]) # add empty list to list of 1 minute\n prices1.append([]) # add empty list to list of 15 minutes\n for i in range(len(order)):\n prices[-1].append(float(order[i][1])) # save 1 minute data\n for i in range(len(order1)):\n prices1[-1].append(float(order1[i][1])) # save 15 minute data\n k.reverse()\n\n for x in k:\n symbols.pop(x)\n volume.pop(x)\n all_positions.pop(x)\n price_change.pop(x)\n\n return symbols, volume, pozitii, prices, prices1,price_change\n# Calculate report between bid and ask offers\ndef process_depth(msg):\n sums5=0\n sumb5=0\n m=-1\n for x in range(5):\n if float(msg['data']['bids'][x][1])>m:\n m=float(msg['data']['bids'][x][1])\n sums5 = sums5 + float(msg['data']['bids'][x][1])\n sumb5 = sumb5 + float(msg['data']['asks'][x][1])\n ratio1 = sums5 / sumb5\n if (ratio1 < 1):\n ratio1 = ((1 / ratio1) * -1) + 1\n else:\n ratio1 -= 1\n sums20 = 0\n sumb20 = 0\n ratio2 = 0\n try:\n for x in range(17):\n sums20 = sums20 + float(msg['data']['bids'][x][1])\n sumb20 = sumb20 + float(msg['data']['asks'][x][1])\n ratio2 = sums20 / sumb20\n if (ratio2 < 1):\n ratio2 = ((1 / ratio2) * -1) + 1\n else:\n ratio2 -= 1\n except Exception as e:\n print(\"\")\n\n for i in range(len(symbols)):\n simbol = symbols[i].lower() + '@depth20'\n if simbol == msg['stream']:\n ratio5[i] = round(ratio1, 2)\n ratio20[i] = round(ratio2, 2)\n max_order5[i] = m\n ratio5_sum[i] = round(float(sums5) * float(current_price[i]) * 100 / float(volume[i]),2)\n current_price[i] = float(msg['data']['bids'][0][0])\n\n\n# Refresh price and volume to current price and volume\ndef 
process_ticker(msg):\n i=0\n for x in symbols:\n for y in range(len(msg)):\n if x == str(msg[y]['s']):\n volume[i] = int(float(msg[y]['q']))\n price_change[i] = int(float(msg[y]['P']))\n i+=1\n\nsymbols,volume,pozitii,k_line_1m,k_line_15m,price_change =get_kline()\n\n\n# Declaring lists necessary for storing data\nmax_order5=[0 for x in range(len(symbols))]\ncurrent_price= [0 for x in range(len(symbols))]\nprice_chance_2_min = [0 for x in range(len(symbols))]\nprice_chance_5_min = [0 for x in range(len(symbols))]\nprice_chance_15_min = [0 for x in range(len(symbols))]\nprice_chance_30_min = [0 for x in range(len(symbols))]\nprice_change_25_30_min = [0 for x in range(len(symbols))]\nprice_chance_1_hour = [0 for x in range(len(symbols))]\nprice_chance_3_hour = [0 for x in range(len(symbols))]\nprice_chance_8_hour = [0 for x in range(len(symbols))]\nprice_change_1_days = [0 for x in range(len(symbols))]\nprice_change_3_days = [0 for x in range(len(symbols))]\nprice_change_5_days = [0 for x in range(len(symbols))]\nprice_change_7_days = [0 for x in range(len(symbols))]\nprice_change_10_days = [0 for x in range(len(symbols))]\naverage_10_min = [0 for x in range(len(symbols))]\naverage_20_min = [0 for x in range(len(symbols))]\naverage_50_min = [0 for x in range(len(symbols))]\naverage_100_min = [0 for x in range(len(symbols))]\naverage_change_10_min = [0 for x in range(len(symbols))]\naverage_change_20_min = [0 for x in range(len(symbols))]\naverage_change_50_min = [0 for x in range(len(symbols))]\naverage_change_100_min = [0 for x in range(len(symbols))]\ntotal_score = [0 for x in range(len(symbols))]\nratio5=[0 for x in range(len(symbols))]\nratio5_10sec=[[] for y in range(len(symbols))]\nratio5_sum = [0 for x in range(len(symbols))]\nratio5_sum_10sec = [[] for y in range(len(symbols))]\nratio20= [0 for x in range(len(symbols))]\n\n# Create list neccessary for depth socked\nlist=[]\nfor x in symbols:\n list.append(x.lower()+'@depth20') # append @depth20 to each symbol and add it into list\n\nbm = BinanceSocketManager(client)\nbm.start()\ndepth_socket = bm.start_multiplex_socket(list,process_depth) # start depth socket\nticker_socket = bm.start_ticker_socket(process_ticker) # start price socket\n\n# maintain candlestick lists\ndef kline_continuum():\n i=0\n while True:\n time.sleep(60)\n for x in range(len(symbols)):\n k_line_1m[x].pop(0)\n k_line_1m[x].append(current_price[x]) # add price to list of 1 minute candlestick every 1 minute\n if i%15==0:\n k_line_15m[x].pop(0)\n k_line_15m[x].append(current_price[x]) # add price to list of 15 minute candlestick every 15 minute\n i+=1\n\n\n# Save report between ask and bit for the last 10 seconds\ndef report_10_seconds():\n while True:\n for x in range(len(symbols)):\n if len(ratio5_10sec[x])>10:\n ratio5_10sec[x].pop(0)\n if len(ratio5_sum_10sec[x]) > 10:\n ratio5_sum_10sec[x].pop(0)\n ratio5_10sec[x].append(ratio5[x])\n ratio5_sum_10sec[x].append(ratio5_sum[x])\n time.sleep(1)\n\n\n# Calculate score for each symbol, you can add as many parameters as you want\ndef calculate_score():\n for x in range(len(symbols)):\n score = 0\n\n # 2 minute change parameter score calculation\n a = float(price_chance_2_min[x])\n if a > 0 and a < 0.5:\n score += 1\n elif a >= 0.5 and a < 1:\n score += 1.25\n elif a >= 1 and a < 1.5:\n score += 1.5\n elif a >= 1.5 and a < 2:\n score += 0.5\n elif a >= 3:\n score += 0.25\n\n # 5 minute change parameter score calculation\n a = float(price_chance_5_min[x])\n if a > 0 and a < 0.5:\n score += 1\n elif a >= 0.5 and a < 1:\n 
score += 1.25\n elif a >= 1 and a < 2:\n score += 1.5\n elif a >= 2 and a < 3:\n score += 0.5\n elif a >= 3:\n score += 0.25\n\n # 15 minute change parameter score calculation\n a = float(price_chance_15_min[x])\n if a <= 1 and a > -0.5:\n score += 0.25\n elif a <= -0.5 and a > -1:\n score += 0.5\n elif a <= -1 and a > -1.5:\n score += 0.75\n elif a <= -1.5:\n score += 1\n\n # change between 25 and 30 minutes ago parameter score calculation\n a = float(price_change_25_30_min[x])\n if a <= 2 and a > -0.75:\n score += 0.25\n elif a <= -0.75 and a > -1.25:\n score += 0.5\n elif a <= -1.25 and a > -1.75:\n score += 0.75\n elif a <= -1.75:\n score += 1\n\n # 1 hour change parameter score calculation\n a = float(price_chance_1_hour[x])\n if a <= 2 and a >= 0:\n score += 0.5\n elif a <= 0 and a > -2:\n score += 0.75\n elif a <= -2:\n score += 1\n\n # 3 hour change parameter score calculation\n a = float(price_chance_3_hour[x])\n if a <= 5 and a > -1:\n score += 0.25\n elif a <= -1 and a > -3:\n score += 0.5\n elif a <= -3 and a > -6:\n score += 0.75\n elif a <= -6:\n score += 1\n\n # 8 hour change parameter score calculation\n a = float(price_chance_8_hour[x])\n if a <= 0 and a > -4:\n score += 0.25\n elif a <= -4 and a > -6:\n score += 0.5\n elif a <= -6:\n score += 0.75\n\n\n\n if float(ratio5[x]) > 0:\n score += 1\n\n\n a = 0\n for i in range(len(ratio5_10sec[x])):\n if float(price_chance_2_min[x]) > 0.55 or float(price_chance_5_min[x]) > 1:\n if float(ratio5_10sec[x][i]) > 0:\n a += 1\n if float(ratio5_sum_10sec[x][i]) > 0.3:\n a += 1\n score += a / len(ratio5_sum_10sec[x])\n\n\n if float(ratio20[x]) > 0:\n score += 1\n\n a = 0\n for i in range(len(ratio5_10sec[x])-1):\n if float(ratio5_10sec[x][i]) > 0:\n a += 1\n if a <= 2:\n score += 0.25\n elif a > 2 and a <= 4:\n score += 0.5\n elif a > 4 and a <= 7:\n score += 0.75\n elif a > 7:\n score += 1\n\n a = 0\n for i in range(20, 1, -1):\n if float(k_line_1m[x][-i]) > float(k_line_1m[x][-(i - 1)]):\n a += 1\n score += a / 10\n\n # 1 day change parameter score calculation\n if float(price_change_1_days[x]) > 5:\n score+=0.3\n # 3 day change parameter score calculation\n if float(price_change_3_days[x]) > 10:\n score += 0.25\n # 5 day change parameter score calculation\n if float(price_change_5_days[x]) > 15:\n score += 0.25\n # 7 day change parameter score calculation\n if float(price_change_7_days[x]) > 20:\n score += 0.25\n # 10 day change parameter score calculation\n if float(price_change_10_days[x]) > -25:\n score += 0.25\n\n # 10 minutes moving average parameter score calculation\n a=float(average_change_10_min[x])\n if a<0.2 and a>-0.3:\n score+=0.1\n # 20 minutes moving average parameter score calculation\n a = float(average_change_20_min[x])\n if a < 0.2 and a > -0.3:\n score += 0.1\n # 50 minutes moving average parameter score calculation\n a = float(average_change_50_min[x])\n if a < 0.2 and a > -0.3:\n score += 0.1\n # 100 minutes moving average parameter score calculation\n a = float(average_change_100_min[x])\n if a < 0.2 and a > -0.3:\n score += 0.1\n\n # save score\n total_score[x] = score\n\n\ndef print_results():\n # sleep time before starting calculations\n time.sleep(10)\n\n while True:\n for x in range(len(symbols)):\n # calculate parameters percentages\n try:\n price_chance_2_min[x] = round(float(current_price[x]) * 100 / float(k_line_1m[x][- 2]) - 100, 2)\n price_chance_5_min[x] = round(float(current_price[x]) * 100 / float(k_line_1m[x][- 5]) - 100, 2)\n price_chance_15_min[x] = round(float(current_price[x]) * 100 / 
float(k_line_1m[x][- 15]) - 100, 2)\n price_chance_30_min[x] = round(float(current_price[x]) * 100 / float(k_line_1m[x][- 30]) - 100, 2)\n price_chance_1_hour[x] = round(float(current_price[x]) * 100 / float(k_line_1m[x][- 60]) - 100, 2)\n price_chance_3_hour[x] = round(float(current_price[x]) * 100 / float(k_line_1m[x][- 180]) - 100, 2)\n price_chance_8_hour[x] = round(float(current_price[x]) * 100 / float(k_line_1m[x][20]) - 100, 2)\n price_change_25_30_min[x] = round(float(k_line_1m[x][- 6]) * 100 / float(k_line_1m[x][- 30]) - 100, 2)\n price_change_1_days[x] = round(float(current_price[x]) * 100 / float(k_line_15m[x][- 96]) - 100, 1)\n price_change_3_days[x] = round(float(current_price[x]) * 100 / float(k_line_15m[x][- 288]) - 100, 1)\n price_change_5_days[x] = round(float(current_price[x]) * 100 / float(k_line_15m[x][- 480] )- 100, 1)\n price_change_7_days[x] = round(float(current_price[x]) * 100 / float(k_line_15m[x][- 672]) - 100, 1)\n price_change_10_days[x] = round(float(current_price[x]) * 100 / float(k_line_15m[x][- 960]) - 100, 1)\n average_10_min[x] = round(float(sum(k_line_1m[x][- 10:])) / 10, 8)\n average_20_min[x] = round(float(sum(k_line_1m[x][- 20:])) / 20, 8)\n average_50_min[x] = round(float(sum(k_line_1m[x][- 50:])) / 50, 8)\n average_100_min[x] = round(float(sum(k_line_1m[x][- 100:])) / 100, 8)\n average_change_10_min[x] = round(float(current_price[x]) * 100 / float(average_10_min[x]) - 100, 2)\n average_change_20_min[x] = round(float(current_price[x]) * 100 / float(average_20_min[x]) - 100, 2)\n average_change_50_min[x] = round(float(current_price[x]) * 100 / float(average_50_min[x]) - 100, 2)\n average_change_100_min[x] = round(float(current_price[x]) * 100 / float(average_100_min[x]) - 100, 2)\n except Exception as e:\n print(e)\n\n\n # call function for score calculation\n calculate_score()\n\n # select parameter for which data is sorted\n sort_by = total_score\n\n # sort data\n sorted_data = sorted(range(len(sort_by)), key=lambda k: sort_by[k])\n # sort data in reverse order\n sorted_data.reverse()\n\n #print table header\n print (time.ctime())\n print ('%5s %5s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %5s %6s %6s %6s %6s %6s' % (\n 'Symbol', 'score', 'r5', 'r20', '2m_ch', '5m_ch', '15m_ch', '30m_ch', '1h_ch', '10MA', '20MA', '50MA', '100MA', '8h_ch',\n '25-30m', 'r5sum', '1d_ch', '3d_ch','5d_ch', '7d_ch', '10d_ch'))\n\n # print top 10 cryptocurrencies data\n for k in range(10):\n i = sorted_data[k]\n print ('%5s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %6s %5s %6s %6s %6s %6s %6s' % (\n symbols[i][:-3], total_score[i], ratio5[i], ratio20[i], price_chance_2_min[i], price_chance_5_min[i],\n price_chance_15_min[i],price_chance_30_min[i], price_chance_1_hour[i], average_change_10_min[i],\n average_change_20_min[i],average_change_50_min[i], average_change_100_min[i], price_chance_8_hour[i],\n price_change_25_30_min[i], ratio5_sum[i], price_change_1_days[i], price_change_3_days[i],\n price_change_5_days[i], price_change_7_days[i], price_change_10_days[i]))\n\n # if score for one coin is > 10 will play sound\n try:\n if float(total_score[sorted_data[0]]) > 10:\n winsound.PlaySound('\\\\Sound.wav', winsound.SND_FILENAME)\n except Exception as e:\n print(e)\n\n # Seconds to wait before repeating while loop\n time.sleep(1)\n\n# Declaring threads\nthreads = [threading.Thread(target=kline_continuum),\n threading.Thread(target=report_10_seconds),\n threading.Thread(target=print_results)]\n# Starting threads\n[thread.start() for thread in 
threads]\n[thread.join() for thread in threads]\n\n\n",
"step-ids": [
5,
8,
9,
11,
12
]
}
|
[
5,
8,
9,
11,
12
] |
import unittest
import subprocess
import tempfile
import os
import filecmp
import shutil
import cfg
import utils
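# cfg and utils are assumed to be local helper modules for this test suite:
# cfg supplies demo input paths and expected-output file lists, while utils
# wraps temp-folder handling, humann invocation, and biom-table reading.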
class TestFunctionalHumannEndtoEndBiom(unittest.TestCase):
"""
    Test humann with end-to-end functional tests
"""
def test_humann_fastq_biom_output(self):
"""
Test the standard humann flow on a fastq input file
Test biom output is written
"""
# create a temp directory for output
tempdir = utils.create_temp_folder("fastq")
# run humann test
command = ["humann","--input",cfg.demo_fastq,"--output",tempdir,
"--output-format", "biom"]
utils.run_humann(command)
# check the output files are as expected
for expression, message in utils.check_output(cfg.expected_demo_output_files_biom, tempdir):
self.assertTrue(expression,message)
# remove the temp directory
utils.remove_temp_folder(tempdir)
def test_humann_fastq_biom_output_pathways(self):
"""
Test the standard humann flow on a fastq input file
Test biom output is written
Test the expected pathways are identified
"""
# create a temp directory for output
tempdir = utils.create_temp_folder("fastq")
# run humann test
command = ["humann","--input",cfg.demo_fastq,"--output",tempdir,
"--output-format", "biom", "--gap-fill", "off"]
utils.run_humann(command)
# check the output file of pathway abundance has the expected pathways
pathways_file_tsv=utils.read_biom_table(os.path.join(tempdir,"demo_pathabundance.biom"))
pathways_found=set([x.split("\t")[0].split(":")[0] for x in filter(lambda x: "PWY" in x, pathways_file_tsv)])
self.assertEqual(pathways_found,cfg.expected_demo_output_files_biom_pathways)
# remove the temp directory
utils.remove_temp_folder(tempdir)
def test_humann_gene_families_biom_input(self):
"""
Test the standard humann flow on a gene families output file as input
Test with the biom format of the gene families file
"""
# create a temp directory for output
tempdir = utils.create_temp_folder("gene_families")
# run humann test
command = ["humann","--input",cfg.demo_gene_families_biom,"--output",tempdir]
utils.run_humann(command)
# check the output files are as expected
# it will include all output files except the gene families output file
# since this file was used as input
for expression, message in utils.check_output(cfg.expected_demo_output_files_genefamilies_input, tempdir):
self.assertTrue(expression,message)
# remove the temp directory
utils.remove_temp_folder(tempdir)
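# Minimal entry point sketch: lets this module be run directly with
# `python <module>.py` as an alternative to `python -m unittest`;
# unittest.main() discovers and runs the TestCase above.
if __name__ == '__main__':
    unittest.main()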
|
flexible
|
{
"blob_id": "27702f72ae147c435617acaab7dd7e5a5a737b13",
"index": 8152,
"step-1": "<mask token>\n\n\nclass TestFunctionalHumannEndtoEndBiom(unittest.TestCase):\n <mask token>\n <mask token>\n <mask token>\n\n def test_humann_gene_families_biom_input(self):\n \"\"\"\n Test the standard humann flow on a gene families output file as input\n Test with the biom format of the gene families file\n \"\"\"\n tempdir = utils.create_temp_folder('gene_families')\n command = ['humann', '--input', cfg.demo_gene_families_biom,\n '--output', tempdir]\n utils.run_humann(command)\n for expression, message in utils.check_output(cfg.\n expected_demo_output_files_genefamilies_input, tempdir):\n self.assertTrue(expression, message)\n utils.remove_temp_folder(tempdir)\n",
"step-2": "<mask token>\n\n\nclass TestFunctionalHumannEndtoEndBiom(unittest.TestCase):\n <mask token>\n\n def test_humann_fastq_biom_output(self):\n \"\"\"\n Test the standard humann flow on a fastq input file\n Test biom output is written\n \"\"\"\n tempdir = utils.create_temp_folder('fastq')\n command = ['humann', '--input', cfg.demo_fastq, '--output', tempdir,\n '--output-format', 'biom']\n utils.run_humann(command)\n for expression, message in utils.check_output(cfg.\n expected_demo_output_files_biom, tempdir):\n self.assertTrue(expression, message)\n utils.remove_temp_folder(tempdir)\n\n def test_humann_fastq_biom_output_pathways(self):\n \"\"\"\n Test the standard humann flow on a fastq input file\n Test biom output is written\n Test the expected pathways are identified\n \"\"\"\n tempdir = utils.create_temp_folder('fastq')\n command = ['humann', '--input', cfg.demo_fastq, '--output', tempdir,\n '--output-format', 'biom', '--gap-fill', 'off']\n utils.run_humann(command)\n pathways_file_tsv = utils.read_biom_table(os.path.join(tempdir,\n 'demo_pathabundance.biom'))\n pathways_found = set([x.split('\\t')[0].split(':')[0] for x in\n filter(lambda x: 'PWY' in x, pathways_file_tsv)])\n self.assertEqual(pathways_found, cfg.\n expected_demo_output_files_biom_pathways)\n utils.remove_temp_folder(tempdir)\n\n def test_humann_gene_families_biom_input(self):\n \"\"\"\n Test the standard humann flow on a gene families output file as input\n Test with the biom format of the gene families file\n \"\"\"\n tempdir = utils.create_temp_folder('gene_families')\n command = ['humann', '--input', cfg.demo_gene_families_biom,\n '--output', tempdir]\n utils.run_humann(command)\n for expression, message in utils.check_output(cfg.\n expected_demo_output_files_genefamilies_input, tempdir):\n self.assertTrue(expression, message)\n utils.remove_temp_folder(tempdir)\n",
"step-3": "<mask token>\n\n\nclass TestFunctionalHumannEndtoEndBiom(unittest.TestCase):\n \"\"\"\n Test humann with end to end functional tests\n \"\"\"\n\n def test_humann_fastq_biom_output(self):\n \"\"\"\n Test the standard humann flow on a fastq input file\n Test biom output is written\n \"\"\"\n tempdir = utils.create_temp_folder('fastq')\n command = ['humann', '--input', cfg.demo_fastq, '--output', tempdir,\n '--output-format', 'biom']\n utils.run_humann(command)\n for expression, message in utils.check_output(cfg.\n expected_demo_output_files_biom, tempdir):\n self.assertTrue(expression, message)\n utils.remove_temp_folder(tempdir)\n\n def test_humann_fastq_biom_output_pathways(self):\n \"\"\"\n Test the standard humann flow on a fastq input file\n Test biom output is written\n Test the expected pathways are identified\n \"\"\"\n tempdir = utils.create_temp_folder('fastq')\n command = ['humann', '--input', cfg.demo_fastq, '--output', tempdir,\n '--output-format', 'biom', '--gap-fill', 'off']\n utils.run_humann(command)\n pathways_file_tsv = utils.read_biom_table(os.path.join(tempdir,\n 'demo_pathabundance.biom'))\n pathways_found = set([x.split('\\t')[0].split(':')[0] for x in\n filter(lambda x: 'PWY' in x, pathways_file_tsv)])\n self.assertEqual(pathways_found, cfg.\n expected_demo_output_files_biom_pathways)\n utils.remove_temp_folder(tempdir)\n\n def test_humann_gene_families_biom_input(self):\n \"\"\"\n Test the standard humann flow on a gene families output file as input\n Test with the biom format of the gene families file\n \"\"\"\n tempdir = utils.create_temp_folder('gene_families')\n command = ['humann', '--input', cfg.demo_gene_families_biom,\n '--output', tempdir]\n utils.run_humann(command)\n for expression, message in utils.check_output(cfg.\n expected_demo_output_files_genefamilies_input, tempdir):\n self.assertTrue(expression, message)\n utils.remove_temp_folder(tempdir)\n",
"step-4": "import unittest\nimport subprocess\nimport tempfile\nimport os\nimport filecmp\nimport shutil\nimport cfg\nimport utils\n\n\nclass TestFunctionalHumannEndtoEndBiom(unittest.TestCase):\n \"\"\"\n Test humann with end to end functional tests\n \"\"\"\n\n def test_humann_fastq_biom_output(self):\n \"\"\"\n Test the standard humann flow on a fastq input file\n Test biom output is written\n \"\"\"\n tempdir = utils.create_temp_folder('fastq')\n command = ['humann', '--input', cfg.demo_fastq, '--output', tempdir,\n '--output-format', 'biom']\n utils.run_humann(command)\n for expression, message in utils.check_output(cfg.\n expected_demo_output_files_biom, tempdir):\n self.assertTrue(expression, message)\n utils.remove_temp_folder(tempdir)\n\n def test_humann_fastq_biom_output_pathways(self):\n \"\"\"\n Test the standard humann flow on a fastq input file\n Test biom output is written\n Test the expected pathways are identified\n \"\"\"\n tempdir = utils.create_temp_folder('fastq')\n command = ['humann', '--input', cfg.demo_fastq, '--output', tempdir,\n '--output-format', 'biom', '--gap-fill', 'off']\n utils.run_humann(command)\n pathways_file_tsv = utils.read_biom_table(os.path.join(tempdir,\n 'demo_pathabundance.biom'))\n pathways_found = set([x.split('\\t')[0].split(':')[0] for x in\n filter(lambda x: 'PWY' in x, pathways_file_tsv)])\n self.assertEqual(pathways_found, cfg.\n expected_demo_output_files_biom_pathways)\n utils.remove_temp_folder(tempdir)\n\n def test_humann_gene_families_biom_input(self):\n \"\"\"\n Test the standard humann flow on a gene families output file as input\n Test with the biom format of the gene families file\n \"\"\"\n tempdir = utils.create_temp_folder('gene_families')\n command = ['humann', '--input', cfg.demo_gene_families_biom,\n '--output', tempdir]\n utils.run_humann(command)\n for expression, message in utils.check_output(cfg.\n expected_demo_output_files_genefamilies_input, tempdir):\n self.assertTrue(expression, message)\n utils.remove_temp_folder(tempdir)\n",
"step-5": "import unittest\nimport subprocess\nimport tempfile\nimport os\nimport filecmp\nimport shutil\n\nimport cfg\nimport utils\n\nclass TestFunctionalHumannEndtoEndBiom(unittest.TestCase):\n \"\"\"\n Test humann with end to end functional tests\n \"\"\"\n\n def test_humann_fastq_biom_output(self):\n \"\"\"\n Test the standard humann flow on a fastq input file\n Test biom output is written\n \"\"\"\n \n # create a temp directory for output\n tempdir = utils.create_temp_folder(\"fastq\")\n \n # run humann test\n command = [\"humann\",\"--input\",cfg.demo_fastq,\"--output\",tempdir,\n \"--output-format\", \"biom\"]\n utils.run_humann(command)\n \n # check the output files are as expected\n for expression, message in utils.check_output(cfg.expected_demo_output_files_biom, tempdir):\n self.assertTrue(expression,message)\n\n # remove the temp directory\n utils.remove_temp_folder(tempdir)\n \n def test_humann_fastq_biom_output_pathways(self):\n \"\"\"\n Test the standard humann flow on a fastq input file\n Test biom output is written\n Test the expected pathways are identified\n \"\"\"\n \n # create a temp directory for output\n tempdir = utils.create_temp_folder(\"fastq\")\n \n # run humann test\n command = [\"humann\",\"--input\",cfg.demo_fastq,\"--output\",tempdir,\n \"--output-format\", \"biom\", \"--gap-fill\", \"off\"]\n utils.run_humann(command)\n \n # check the output file of pathway abundance has the expected pathways\n pathways_file_tsv=utils.read_biom_table(os.path.join(tempdir,\"demo_pathabundance.biom\"))\n pathways_found=set([x.split(\"\\t\")[0].split(\":\")[0] for x in filter(lambda x: \"PWY\" in x, pathways_file_tsv)])\n \n self.assertEqual(pathways_found,cfg.expected_demo_output_files_biom_pathways)\n\n # remove the temp directory\n utils.remove_temp_folder(tempdir)\n \n def test_humann_gene_families_biom_input(self):\n \"\"\"\n Test the standard humann flow on a gene families output file as input\n Test with the biom format of the gene families file\n \"\"\"\n \n # create a temp directory for output\n tempdir = utils.create_temp_folder(\"gene_families\")\n \n # run humann test\n command = [\"humann\",\"--input\",cfg.demo_gene_families_biom,\"--output\",tempdir]\n utils.run_humann(command)\n \n # check the output files are as expected\n # it will include all output files except the gene families output file\n # since this file was used as input\n for expression, message in utils.check_output(cfg.expected_demo_output_files_genefamilies_input, tempdir):\n self.assertTrue(expression,message)\n\n # remove the temp directory\n utils.remove_temp_folder(tempdir)\n",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [('blog', '0014_auto_20190409_1917')]
operations = [migrations.AlterField(model_name='article', name=
'estArchive', field=models.BooleanField(default=False, verbose_name
="Archiver l'article")), migrations.AlterField(model_name='projet',
name='estArchive', field=models.BooleanField(default=False,
verbose_name='Archiver le projet'))]
<|reserved_special_token_1|>
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [('blog', '0014_auto_20190409_1917')]
operations = [migrations.AlterField(model_name='article', name=
'estArchive', field=models.BooleanField(default=False, verbose_name
="Archiver l'article")), migrations.AlterField(model_name='projet',
name='estArchive', field=models.BooleanField(default=False,
verbose_name='Archiver le projet'))]
<|reserved_special_token_1|>
# Generated by Django 2.1.3 on 2019-04-10 11:04
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('blog', '0014_auto_20190409_1917'),
]
operations = [
migrations.AlterField(
model_name='article',
name='estArchive',
field=models.BooleanField(default=False, verbose_name="Archiver l'article"),
),
migrations.AlterField(
model_name='projet',
name='estArchive',
field=models.BooleanField(default=False, verbose_name='Archiver le projet'),
),
]
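For context, the models in the blog app that this migration converges on would carry fields along the following lines; this is a reconstruction from the AlterField operations above, and the rest of each model is assumed:
from django.db import models

class Article(models.Model):
    # Other fields omitted; only the flag touched by the migration is shown.
    estArchive = models.BooleanField(default=False, verbose_name="Archiver l'article")

class Projet(models.Model):
    estArchive = models.BooleanField(default=False, verbose_name='Archiver le projet')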
|
flexible
|
{
"blob_id": "21c8078a18ee4579fa9b4b1b667d6ea0c1ce99b3",
"index": 6005,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('blog', '0014_auto_20190409_1917')]\n operations = [migrations.AlterField(model_name='article', name=\n 'estArchive', field=models.BooleanField(default=False, verbose_name\n =\"Archiver l'article\")), migrations.AlterField(model_name='projet',\n name='estArchive', field=models.BooleanField(default=False,\n verbose_name='Archiver le projet'))]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('blog', '0014_auto_20190409_1917')]\n operations = [migrations.AlterField(model_name='article', name=\n 'estArchive', field=models.BooleanField(default=False, verbose_name\n =\"Archiver l'article\")), migrations.AlterField(model_name='projet',\n name='estArchive', field=models.BooleanField(default=False,\n verbose_name='Archiver le projet'))]\n",
"step-5": "# Generated by Django 2.1.3 on 2019-04-10 11:04\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('blog', '0014_auto_20190409_1917'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='article',\n name='estArchive',\n field=models.BooleanField(default=False, verbose_name=\"Archiver l'article\"),\n ),\n migrations.AlterField(\n model_name='projet',\n name='estArchive',\n field=models.BooleanField(default=False, verbose_name='Archiver le projet'),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for line in lines:
strpline = line.rstrip()
arr = strpline.split(',')
newline = []
for i in range(len(arr)):
if arr[i] == 'y':
newline.append(i)
if arr[0] == 'republican':
newline.append(100)
label.append(0)
else:
newline.append(200)
label.append(1)
X.append(newline)
print(
'a. Run the itemset mining algorithm with 20% support. How many frequent itemsets are there?'
)
<|reserved_special_token_0|>
print(len(a))
<|reserved_special_token_0|>
print("""
b. Write top 10 itemsets (in terms of highest support value).""")
for i in range(10):
print(b3[i])
print("""
c. How many frequent itemsets have 100 as part of itemsets?""")
<|reserved_special_token_0|>
for i in range(len(a)):
if 100 in a[i][0]:
c1.append(a[i].tolist())
<|reserved_special_token_0|>
print(len(c3))
print("""
d. How many frequent itemsets have 200 as part of itemsets?""")
<|reserved_special_token_0|>
for i in range(len(a)):
if 200 in a[i][0]:
d1.append(a[i].tolist())
<|reserved_special_token_0|>
print(len(d3))
print(
"""
e. Write top 10 association rules (in terms of highest confidence value) where the rule's head is 100."""
)
<|reserved_special_token_0|>
for i in range(len(e3)):
if e3[i][0] == 100:
e4.append(e3[i].tolist())
<|reserved_special_token_0|>
for i in range(10):
print('confidence value:', e5[i][2], ' association rule:', e5[i][1],
'→', e5[i][0])
print(
"""
f. How many rules with head 100 are there for which the confidence value is more than 75%? List them."""
)
<|reserved_special_token_0|>
for i in range(len(f1)):
if f1[i][2] > 0.75:
count_100 = count_100 + 1
print('confidence value:', f1[i][2], ' association rule:', f1[i]
[1], '→', f1[i][0])
print('Total:', count_100)
print(
"""
g. Write top 10 association rules (in terms of highest confidence value) where the rule's head is 200."""
)
<|reserved_special_token_0|>
for i in range(len(g3)):
if g3[i][0] == 200:
g4.append(g3[i].tolist())
<|reserved_special_token_0|>
for i in range(10):
print('confidence value:', g5[i][2], ' association rule:', g5[i][1],
'→', g5[i][0])
print(
"""
h. How many rules with head 200 are there for which the confidence value is more than 75%? List them."""
)
<|reserved_special_token_0|>
for i in range(len(h1)):
if h1[i][2] > 0.75:
count_200 = count_200 + 1
print('confidence value:', h1[i][2], ' association rule:', h1[i]
[1], '→', h1[i][0])
print('Total:', count_200)
print("""
i. soft-margin SVM with linear kernel""")
<|reserved_special_token_0|>
for i in range(len(X)):
for j in range(len(i2)):
if set(i2[j]).issubset(set(X[i])) == True:
i3[i][j] = 1
else:
i3[i][j] = 0
<|reserved_special_token_0|>
print('The best parameters: ', model_linear.best_params_)
<|reserved_special_token_0|>
print('accuracy:', accuracy_lin_1)
<|reserved_special_token_0|>
print('The best parameters: ', model_linear.best_params_)
<|reserved_special_token_0|>
print('accuracy:', accuracy_lin_2)
<|reserved_special_token_0|>
print('The best parameters: ', model_linear.best_params_)
<|reserved_special_token_0|>
print('accuracy:', accuracy_lin_3)
<|reserved_special_token_0|>
print('Average 3-fold classification accuracy (along with standard deviation):',
scores_lin.mean(), '(+/-', scores_lin.std(), ')')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
f = open('house-votes-84.data', 'r')
lines = f.readlines()
X = []
label = []
for line in lines:
strpline = line.rstrip()
arr = strpline.split(',')
newline = []
for i in range(len(arr)):
if arr[i] == 'y':
newline.append(i)
if arr[0] == 'republican':
newline.append(100)
label.append(0)
else:
newline.append(200)
label.append(1)
X.append(newline)
print(
'a. Run the itemset mining algorithm with 20% support. How many frequent itemsets are there?'
)
a = np.array(fim.eclat(X, supp=20))
print(len(a))
b1 = fim.eclat(X, supp=20, report='a')
b2 = np.array(b1)
b3 = b2[b2[:, 1].argsort()][::-1]
print("""
b. Write top 10 itemsets (in terms of highest support value).""")
for i in range(10):
print(b3[i])
print("""
c. How many frequent itemsets have 100 as part of itemsets?""")
c1 = []
a = np.array(a)
for i in range(len(a)):
if 100 in a[i][0]:
c1.append(a[i].tolist())
c2 = np.array(c1)
c3 = c2[c2[:, 1].argsort()][::-1].tolist()
print(len(c3))
print("""
d. How many frequent itemsets have 200 as part of itemsets?""")
d1 = []
for i in range(len(a)):
if 200 in a[i][0]:
d1.append(a[i].tolist())
d2 = np.array(d1)
d3 = d2[d2[:, 1].argsort()][::-1].tolist()
print(len(d3))
print(
"""
e. Write top 10 association rules (in terms of highest confidence value) where the rule's head is 100."""
)
e1 = fim.eclat(X, supp=20, target='r', report='c', conf=75.0001)
e2 = np.array(e1)
e3 = e2[e2[:, 2].argsort()][::-1]
e4 = []
for i in range(len(e3)):
if e3[i][0] == 100:
e4.append(e3[i].tolist())
e5 = np.array(e4)
for i in range(10):
print('confidence value:', e5[i][2], ' association rule:', e5[i][1],
'→', e5[i][0])
print(
"""
f. How many rules with head 100 are there for which the confidence value is more than 75%? List them."""
)
f1 = e5.copy()
count_100 = 0
for i in range(len(f1)):
if f1[i][2] > 0.75:
count_100 = count_100 + 1
print('confidence value:', f1[i][2], ' association rule:', f1[i]
[1], '→', f1[i][0])
print('Total:', count_100)
print(
"""
g. Write top 10 association rules (in terms of highest confidence value) where the rule's head is 200."""
)
g2 = np.array(e1)
g3 = g2[g2[:, 2].argsort()][::-1]
g4 = []
for i in range(len(g3)):
if g3[i][0] == 200:
g4.append(g3[i].tolist())
g5 = np.array(g4)
for i in range(10):
print('confidence value:', g5[i][2], ' association rule:', g5[i][1],
'→', g5[i][0])
print(
"""
h. How many rules with head 200 are there for which the confidence value is more than 75%? List them."""
)
h1 = g5.copy()
count_200 = 0
for i in range(len(h1)):
if h1[i][2] > 0.75:
count_200 = count_200 + 1
print('confidence value:', h1[i][2], ' association rule:', h1[i]
[1], '→', h1[i][0])
print('Total:', count_200)
print("""
i. soft-margin SVM with linear kernel""")
i1 = e3[:, 1].copy()
i2 = list(dict.fromkeys(i1))
i3 = np.zeros((len(X), len(i2))).astype(int)
for i in range(len(X)):
for j in range(len(i2)):
if set(i2[j]).issubset(set(X[i])) == True:
i3[i][j] = 1
else:
i3[i][j] = 0
(data_train_lin_1, data_test_lin_1, data_train_label_lin_1,
data_test_label_lin_1) = (train_test_split(i3, label, train_size=0.75,
random_state=0, stratify=label))
parameters_linear = [{'C': [0.5, 0.7, 0.9, 1.0, 1.5]}]
model_linear = GridSearchCV(SVC(kernel='linear'), parameters_linear, cv=3).fit(
data_train_lin_1, data_train_label_lin_1)
print('The best parameters: ', model_linear.best_params_)
predicted_label_lin_1 = model_linear.predict(data_test_lin_1)
accuracy_lin_1 = accuracy_score(data_test_label_lin_1, predicted_label_lin_1)
print('accuracy:', accuracy_lin_1)
(data_test_lin_2, data_train_lin_2, data_test_label_lin_2,
data_train_label_lin_2) = (train_test_split(i3, label, train_size=0.25,
random_state=0, stratify=label))
model_linear = GridSearchCV(SVC(kernel='linear'), parameters_linear, cv=3).fit(
data_train_lin_2, data_train_label_lin_2)
print('The best parameters: ', model_linear.best_params_)
predicted_label_lin_2 = model_linear.predict(data_test_lin_2)
accuracy_lin_2 = accuracy_score(data_test_label_lin_2, predicted_label_lin_2)
print('accuracy:', accuracy_lin_2)
(data_temp1_lin_3, data_temp2_lin_3, data_temp1_label_lin_3,
data_temp2_label_lin_3) = (train_test_split(i3, label, train_size=0.375,
random_state=0, stratify=label))
(data_test_lin_3, data_temp3_lin_3, data_test_label_lin_3,
data_temp3_label_lin_3) = (train_test_split(data_temp2_lin_3,
data_temp2_label_lin_3, train_size=0.4, random_state=0, stratify=
data_temp2_label_lin_3))
data_train_lin_3 = np.vstack((data_temp1_lin_3, data_temp3_lin_3))
data_train_label_lin_3 = np.hstack((data_temp1_label_lin_3,
data_temp3_label_lin_3))
model_linear = GridSearchCV(SVC(kernel='linear'), parameters_linear, cv=3).fit(
data_train_lin_3, data_train_label_lin_3)
print('The best parameters: ', model_linear.best_params_)
predicted_label_lin_3 = model_linear.predict(data_test_lin_3)
accuracy_lin_3 = accuracy_score(data_test_label_lin_3, predicted_label_lin_3)
print('accuracy:', accuracy_lin_3)
scores_lin = np.array([accuracy_lin_1, accuracy_lin_2, accuracy_lin_3])
print('Average 3-fold classification accuracy (along with standard deviation):',
scores_lin.mean(), '(+/-', scores_lin.std(), ')')
<|reserved_special_token_1|>
import fim
import numpy as np
from sklearn.model_selection import train_test_split, GridSearchCV
from sklearn.svm import SVC
from sklearn.metrics import accuracy_score
f = open('house-votes-84.data', 'r')
lines = f.readlines()
X = []
label = []
for line in lines:
strpline = line.rstrip()
arr = strpline.split(',')
newline = []
for i in range(len(arr)):
if arr[i] == 'y':
newline.append(i)
if arr[0] == 'republican':
newline.append(100)
label.append(0)
else:
newline.append(200)
label.append(1)
X.append(newline)
print(
'a. Run the itemset mining algorithm with 20% support. How many frequent itemsets are there?'
)
a = np.array(fim.eclat(X, supp=20))
print(len(a))
b1 = fim.eclat(X, supp=20, report='a')
b2 = np.array(b1)
b3 = b2[b2[:, 1].argsort()][::-1]
print("""
b. Write top 10 itemsets (in terms of highest support value).""")
for i in range(10):
print(b3[i])
print("""
c. How many frequent itemsets have 100 as part of itemsets?""")
c1 = []
a = np.array(a)
for i in range(len(a)):
if 100 in a[i][0]:
c1.append(a[i].tolist())
c2 = np.array(c1)
c3 = c2[c2[:, 1].argsort()][::-1].tolist()
print(len(c3))
print("""
d. How many frequent itemsets have 200 as part of itemsets?""")
d1 = []
for i in range(len(a)):
if 200 in a[i][0]:
d1.append(a[i].tolist())
d2 = np.array(d1)
d3 = d2[d2[:, 1].argsort()][::-1].tolist()
print(len(d3))
print(
"""
e. Write top 10 association rules (in terms of highest confidence value) where the rule's head is 100."""
)
e1 = fim.eclat(X, supp=20, target='r', report='c', conf=75.0001)
e2 = np.array(e1)
e3 = e2[e2[:, 2].argsort()][::-1]
e4 = []
for i in range(len(e3)):
if e3[i][0] == 100:
e4.append(e3[i].tolist())
e5 = np.array(e4)
for i in range(10):
print('confidence value:', e5[i][2], ' association rule:', e5[i][1],
'→', e5[i][0])
print(
"""
f. How many rules with head 100 are there for which the confidence value is more than 75%? List them."""
)
f1 = e5.copy()
count_100 = 0
for i in range(len(f1)):
if f1[i][2] > 0.75:
count_100 = count_100 + 1
print('confidence value:', f1[i][2], ' association rule:', f1[i]
[1], '→', f1[i][0])
print('Total:', count_100)
print(
"""
g. Write top 10 association rules (in terms of highest confidence value) where the rule's head is 200."""
)
g2 = np.array(e1)
g3 = g2[g2[:, 2].argsort()][::-1]
g4 = []
for i in range(len(g3)):
if g3[i][0] == 200:
g4.append(g3[i].tolist())
g5 = np.array(g4)
for i in range(10):
print('confidence value:', g5[i][2], ' association rule:', g5[i][1],
'→', g5[i][0])
print(
"""
h. How many rules with head 200 are there for which the confidence value is more than 75%? List them."""
)
h1 = g5.copy()
count_200 = 0
for i in range(len(h1)):
if h1[i][2] > 0.75:
count_200 = count_200 + 1
print('confidence value:', h1[i][2], ' association rule:', h1[i]
[1], '→', h1[i][0])
print('Total:', count_200)
print("""
i. soft-margin SVM with linear kernel""")
i1 = e3[:, 1].copy()
i2 = list(dict.fromkeys(i1))
i3 = np.zeros((len(X), len(i2))).astype(int)
for i in range(len(X)):
for j in range(len(i2)):
if set(i2[j]).issubset(set(X[i])) == True:
i3[i][j] = 1
else:
i3[i][j] = 0
(data_train_lin_1, data_test_lin_1, data_train_label_lin_1,
data_test_label_lin_1) = (train_test_split(i3, label, train_size=0.75,
random_state=0, stratify=label))
parameters_linear = [{'C': [0.5, 0.7, 0.9, 1.0, 1.5]}]
model_linear = GridSearchCV(SVC(kernel='linear'), parameters_linear, cv=3).fit(
data_train_lin_1, data_train_label_lin_1)
print('The best parameters: ', model_linear.best_params_)
predicted_label_lin_1 = model_linear.predict(data_test_lin_1)
accuracy_lin_1 = accuracy_score(data_test_label_lin_1, predicted_label_lin_1)
print('accuracy:', accuracy_lin_1)
(data_test_lin_2, data_train_lin_2, data_test_label_lin_2,
data_train_label_lin_2) = (train_test_split(i3, label, train_size=0.25,
random_state=0, stratify=label))
model_linear = GridSearchCV(SVC(kernel='linear'), parameters_linear, cv=3).fit(
data_train_lin_2, data_train_label_lin_2)
print('The best parameters: ', model_linear.best_params_)
predicted_label_lin_2 = model_linear.predict(data_test_lin_2)
accuracy_lin_2 = accuracy_score(data_test_label_lin_2, predicted_label_lin_2)
print('accuracy:', accuracy_lin_2)
(data_temp1_lin_3, data_temp2_lin_3, data_temp1_label_lin_3,
data_temp2_label_lin_3) = (train_test_split(i3, label, train_size=0.375,
random_state=0, stratify=label))
(data_test_lin_3, data_temp3_lin_3, data_test_label_lin_3,
data_temp3_label_lin_3) = (train_test_split(data_temp2_lin_3,
data_temp2_label_lin_3, train_size=0.4, random_state=0, stratify=
data_temp2_label_lin_3))
data_train_lin_3 = np.vstack((data_temp1_lin_3, data_temp3_lin_3))
data_train_label_lin_3 = np.hstack((data_temp1_label_lin_3,
data_temp3_label_lin_3))
model_linear = GridSearchCV(SVC(kernel='linear'), parameters_linear, cv=3).fit(
data_train_lin_3, data_train_label_lin_3)
print('The best parameters: ', model_linear.best_params_)
predicted_label_lin_3 = model_linear.predict(data_test_lin_3)
accuracy_lin_3 = accuracy_score(data_test_label_lin_3, predicted_label_lin_3)
print('accuracy:', accuracy_lin_3)
scores_lin = np.array([accuracy_lin_1, accuracy_lin_2, accuracy_lin_3])
print('Average 3-fold classification accuracy (along with standard deviation):',
scores_lin.mean(), '(+/-', scores_lin.std(), ')')
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
###########################
# CSCI 573 Data Mining - Eclat and Linear Kernel SVM
# Author: Chu-An Tsai
# 12/14/2019
###########################
import fim
import numpy as np
from sklearn.model_selection import train_test_split, GridSearchCV
from sklearn.svm import SVC
from sklearn.metrics import accuracy_score
f = open('house-votes-84.data','r')
lines = f.readlines()
X = []
label = []
for line in lines:
strpline = line.rstrip()
arr = strpline.split(',')
    newline = []
for i in range(len(arr)):
if arr[i] == 'y':
newline.append(i)
if arr[0] == 'republican':
newline.append(100)
label.append(0)
else:
newline.append(200)
label.append(1)
#print(*newline, sep=',')
X.append(newline)
################################# a.
print('a. Run the itemset mining algorithm with 20% support. How many frequent itemsets are there?')
a = np.array(fim.eclat(X, supp=20))
print(len(a))
################################# b.
b1 = fim.eclat(X, supp=20, report='a')
b2 = np.array(b1)
b3 = b2[b2[:,1].argsort()][::-1]
print('\nb. Write top 10 itemsets (in terms of highest support value).')
for i in range(10):
print(b3[i])
################################# c.
print('\nc. How many frequent itemsets have 100 as part of itemsets?')
c1 = []
a=np.array(a)
for i in range(len(a)):
if 100 in a[i][0]:
c1.append(a[i].tolist())
c2 = np.array(c1)
c3 = c2[c2[:,1].argsort()][::-1].tolist()
print(len(c3))
################################## d.
print('\nd. How many frequent itemsets have 200 as part of itemsets?')
d1 = []
for i in range(len(a)):
if 200 in a[i][0]:
d1.append(a[i].tolist())
d2 = np.array(d1)
d3 = d2[d2[:,1].argsort()][::-1].tolist()
print(len(d3))
################################## e.
print("\ne. Write top 10 association rules (in terms of highest confidence value) where the rule's head is 100.")
e1 = fim.eclat(X, supp=20, target='r', report='c', conf=75.0001)
e2 = np.array(e1)
e3 = e2[e2[:,2].argsort()][::-1]
e4 = []
for i in range(len(e3)):
if e3[i][0] == 100:
e4.append(e3[i].tolist())
e5 = np.array(e4)
for i in range(10):
print('confidence value:',e5[i][2],' association rule:', e5[i][1], '→', e5[i][0],)
################################## f.
print('\nf. How many rules with head 100 are there for which the confidence value is more than 75%? List them.')
f1 = e5.copy()
count_100 = 0
for i in range(len(f1)):
if (f1[i][2]) > 0.75:
count_100 = count_100 + 1
print('confidence value:', f1[i][2], ' association rule:', f1[i][1], '→', f1[i][0],)
print('Total:',count_100)
################################## g.
print("\ng. Write top 10 association rules (in terms of highest confidence value) where the rule's head is 200.")
g2 = np.array(e1)
g3 = g2[g2[:,2].argsort()][::-1]
g4 = []
for i in range(len(g3)):
if g3[i][0] == 200:
g4.append(g3[i].tolist())
g5 = np.array(g4)
for i in range(10):
print('confidence value:',g5[i][2],' association rule:', g5[i][1], '→', g5[i][0],)
################################## h.
print('\nh. How many rules with head 200 are there for which the confidence value is more than 75%? List them.')
h1 = g5.copy()
count_200 = 0
for i in range(len(h1)):
if (h1[i][2]) > 0.75:
count_200 = count_200 + 1
print('confidence value:', h1[i][2], ' association rule:', h1[i][1], '→', h1[i][0],)
print('Total:',count_200)
################################### i.
print('\ni. soft-margin SVM with linear kernel')
i1 = e3[:,1].copy()
i2 = list(dict.fromkeys(i1))
i3 = np.zeros((len(X),len(i2))).astype(int)
for i in range(len(X)):
for j in range(len(i2)):
if (set(i2[j]).issubset(set(X[i]))) == True:
i3[i][j] = 1
else:
i3[i][j] = 0
# Training set = first 75% data, Tuning set = 25% from training set, Test set = last 25% data
data_train_lin_1, data_test_lin_1, data_train_label_lin_1, data_test_label_lin_1 = train_test_split(i3, label, train_size=0.75, random_state = 0, stratify = label)
#C = np.arange(0.01, 2, 0.01)
#parameters_linear = [{'C':C}]
parameters_linear = [{'C':[0.5, 0.7, 0.9, 1.0, 1.5]}]
model_linear = GridSearchCV(SVC(kernel='linear'), parameters_linear, cv=3).fit(data_train_lin_1, data_train_label_lin_1)
print('The best parameters: ', model_linear.best_params_)
#print("Scores for crossvalidation:")
#for mean, params in zip(model_linear.cv_results_['mean_test_score'], model_linear.cv_results_['params']):
#print("Accuracy: %0.6f for %r" % (mean, params))
predicted_label_lin_1 = model_linear.predict(data_test_lin_1)
accuracy_lin_1 = accuracy_score(data_test_label_lin_1, predicted_label_lin_1)
print('accuracy:',accuracy_lin_1)
# Training set = last 75% data, Tuning set = 25% from training set, Test set = first 25% data
data_test_lin_2, data_train_lin_2, data_test_label_lin_2, data_train_label_lin_2 = train_test_split(i3, label, train_size=0.25, random_state = 0, stratify = label)
model_linear = GridSearchCV(SVC(kernel='linear'), parameters_linear, cv=3).fit(data_train_lin_2, data_train_label_lin_2)
print('The best parameters: ', model_linear.best_params_)
#print("Scores for crossvalidation:")
#for mean, params in zip(model_linear.cv_results_['mean_test_score'], model_linear.cv_results_['params']):
#print("Accuracy: %0.6f for %r" % (mean, params))
predicted_label_lin_2 = model_linear.predict(data_test_lin_2)
accuracy_lin_2 = accuracy_score(data_test_label_lin_2, predicted_label_lin_2)
print('accuracy:',accuracy_lin_2)
# Training set = first 37.5% and last 37.5%, Tuning set = 25% from training set, Test set = first 25% data
data_temp1_lin_3, data_temp2_lin_3, data_temp1_label_lin_3, data_temp2_label_lin_3 = train_test_split(i3, label, train_size=0.375, random_state = 0, stratify = label)
data_test_lin_3, data_temp3_lin_3, data_test_label_lin_3, data_temp3_label_lin_3 = train_test_split(data_temp2_lin_3, data_temp2_label_lin_3, train_size=0.4, random_state = 0, stratify = data_temp2_label_lin_3)
data_train_lin_3 = np.vstack((data_temp1_lin_3, data_temp3_lin_3))
data_train_label_lin_3 = np.hstack((data_temp1_label_lin_3, data_temp3_label_lin_3))
model_linear = GridSearchCV(SVC(kernel='linear'), parameters_linear, cv=3).fit(data_train_lin_3, data_train_label_lin_3)
print('The best parameters: ', model_linear.best_params_)
#print("Scores for crossvalidation:")
#for mean, params in zip(model_linear.cv_results_['mean_test_score'], model_linear.cv_results_['params']):
#print("Accuracy: %0.6f for %r" % (mean, params))
predicted_label_lin_3 = model_linear.predict(data_test_lin_3)
accuracy_lin_3 = accuracy_score(data_test_label_lin_3, predicted_label_lin_3)
print('accuracy:',accuracy_lin_3)
scores_lin = np.array([accuracy_lin_1, accuracy_lin_2, accuracy_lin_3])
print('Average 3-fold classification accuracy (along with standard deviation):', scores_lin.mean(), '(+/-',scores_lin.std(),')')
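As a quick illustration of the pyfim calls this script leans on, here is a toy run on three hand-made transactions; the exact output depends on the installed fim version, so treat the printed values as indicative only:
import fim

# Items are plain integers, with 100/200 acting as class markers as above.
toy = [[1, 2, 100], [1, 2, 200], [2, 100]]
# Frequent itemsets at 50% support, reporting absolute support counts.
print(fim.eclat(toy, supp=50, report='a'))
# Association rules with confidence reported, mirroring the target='r' call above.
print(fim.eclat(toy, supp=50, target='r', report='c', conf=60))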
|
flexible
|
{
"blob_id": "07b05093b630fc0167532884ec69a00420ed70b4",
"index": 4021,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor line in lines:\n strpline = line.rstrip()\n arr = strpline.split(',')\n newline = []\n for i in range(len(arr)):\n if arr[i] == 'y':\n newline.append(i)\n if arr[0] == 'republican':\n newline.append(100)\n label.append(0)\n else:\n newline.append(200)\n label.append(1)\n X.append(newline)\nprint(\n 'a. Run the itemset mining algorithm with 20% support. How many frequent itemsets are there?'\n )\n<mask token>\nprint(len(a))\n<mask token>\nprint(\"\"\"\nb. Write top 10 itemsets (in terms of highest support value).\"\"\")\nfor i in range(10):\n print(b3[i])\nprint(\"\"\"\nc. How many frequent itemsets have 100 as part of itemsets?\"\"\")\n<mask token>\nfor i in range(len(a)):\n if 100 in a[i][0]:\n c1.append(a[i].tolist())\n<mask token>\nprint(len(c3))\nprint(\"\"\"\nd. How many frequent itemsets have 200 as part of itemsets?\"\"\")\n<mask token>\nfor i in range(len(a)):\n if 200 in a[i][0]:\n d1.append(a[i].tolist())\n<mask token>\nprint(len(d3))\nprint(\n \"\"\"\ne. Write top 10 association rules (in terms of highest confidence value) where the rules head is 100.\"\"\"\n )\n<mask token>\nfor i in range(len(e3)):\n if e3[i][0] == 100:\n e4.append(e3[i].tolist())\n<mask token>\nfor i in range(10):\n print('confidence value:', e5[i][2], ' association rule:', e5[i][1],\n '→', e5[i][0])\nprint(\n \"\"\"\nf. How many rules with head 100 are there for which the confidence value is more than 75%? List them.\"\"\"\n )\n<mask token>\nfor i in range(len(f1)):\n if f1[i][2] > 0.75:\n count_100 = count_100 + 1\n print('confidence value:', f1[i][2], ' association rule:', f1[i]\n [1], '→', f1[i][0])\nprint('Total:', count_100)\nprint(\n \"\"\"\ng. Write top 10 association rules (in terms of highest confidence value) where the rules head is 200.\"\"\"\n )\n<mask token>\nfor i in range(len(g3)):\n if g3[i][0] == 200:\n g4.append(g3[i].tolist())\n<mask token>\nfor i in range(10):\n print('confidence value:', g5[i][2], ' association rule:', g5[i][1],\n '→', g5[i][0])\nprint(\n \"\"\"\nh. How many rules with head 200 are there for which the confidence value is more than 75%? List them.\"\"\"\n )\n<mask token>\nfor i in range(len(h1)):\n if h1[i][2] > 0.75:\n count_200 = count_200 + 1\n print('confidence value:', h1[i][2], ' association rule:', h1[i]\n [1], '→', h1[i][0])\nprint('Total:', count_200)\nprint(\"\"\"\ni. soft-margin SVM with linear kernel\"\"\")\n<mask token>\nfor i in range(len(X)):\n for j in range(len(i2)):\n if set(i2[j]).issubset(set(X[i])) == True:\n i3[i][j] = 1\n else:\n i3[i][j] = 0\n<mask token>\nprint('The best parameters: ', model_linear.best_params_)\n<mask token>\nprint('accurac:', accuracy_lin_1)\n<mask token>\nprint('The best parameters: ', model_linear.best_params_)\n<mask token>\nprint('accurac:', accuracy_lin_2)\n<mask token>\nprint('The best parameters: ', model_linear.best_params_)\n<mask token>\nprint('accurac:', accuracy_lin_3)\n<mask token>\nprint('Average 3-fold classification accuracy(along with standard deviation):',\n scores_lin.mean(), '(+/-', scores_lin.std(), ')')\n",
"step-3": "<mask token>\nf = open('house-votes-84.data', 'r')\nlines = f.readlines()\nX = []\nlabel = []\nfor line in lines:\n strpline = line.rstrip()\n arr = strpline.split(',')\n newline = []\n for i in range(len(arr)):\n if arr[i] == 'y':\n newline.append(i)\n if arr[0] == 'republican':\n newline.append(100)\n label.append(0)\n else:\n newline.append(200)\n label.append(1)\n X.append(newline)\nprint(\n 'a. Run the itemset mining algorithm with 20% support. How many frequent itemsets are there?'\n )\na = np.array(fim.eclat(X, supp=20))\nprint(len(a))\nb1 = fim.eclat(X, supp=20, report='a')\nb2 = np.array(b1)\nb3 = b2[b2[:, 1].argsort()][::-1]\nprint(\"\"\"\nb. Write top 10 itemsets (in terms of highest support value).\"\"\")\nfor i in range(10):\n print(b3[i])\nprint(\"\"\"\nc. How many frequent itemsets have 100 as part of itemsets?\"\"\")\nc1 = []\na = np.array(a)\nfor i in range(len(a)):\n if 100 in a[i][0]:\n c1.append(a[i].tolist())\nc2 = np.array(c1)\nc3 = c2[c2[:, 1].argsort()][::-1].tolist()\nprint(len(c3))\nprint(\"\"\"\nd. How many frequent itemsets have 200 as part of itemsets?\"\"\")\nd1 = []\nfor i in range(len(a)):\n if 200 in a[i][0]:\n d1.append(a[i].tolist())\nd2 = np.array(d1)\nd3 = d2[d2[:, 1].argsort()][::-1].tolist()\nprint(len(d3))\nprint(\n \"\"\"\ne. Write top 10 association rules (in terms of highest confidence value) where the rules head is 100.\"\"\"\n )\ne1 = fim.eclat(X, supp=20, target='r', report='c', conf=75.0001)\ne2 = np.array(e1)\ne3 = e2[e2[:, 2].argsort()][::-1]\ne4 = []\nfor i in range(len(e3)):\n if e3[i][0] == 100:\n e4.append(e3[i].tolist())\ne5 = np.array(e4)\nfor i in range(10):\n print('confidence value:', e5[i][2], ' association rule:', e5[i][1],\n '→', e5[i][0])\nprint(\n \"\"\"\nf. How many rules with head 100 are there for which the confidence value is more than 75%? List them.\"\"\"\n )\nf1 = e5.copy()\ncount_100 = 0\nfor i in range(len(f1)):\n if f1[i][2] > 0.75:\n count_100 = count_100 + 1\n print('confidence value:', f1[i][2], ' association rule:', f1[i]\n [1], '→', f1[i][0])\nprint('Total:', count_100)\nprint(\n \"\"\"\ng. Write top 10 association rules (in terms of highest confidence value) where the rules head is 200.\"\"\"\n )\ng2 = np.array(e1)\ng3 = g2[g2[:, 2].argsort()][::-1]\ng4 = []\nfor i in range(len(g3)):\n if g3[i][0] == 200:\n g4.append(g3[i].tolist())\ng5 = np.array(g4)\nfor i in range(10):\n print('confidence value:', g5[i][2], ' association rule:', g5[i][1],\n '→', g5[i][0])\nprint(\n \"\"\"\nh. How many rules with head 200 are there for which the confidence value is more than 75%? List them.\"\"\"\n )\nh1 = g5.copy()\ncount_200 = 0\nfor i in range(len(h1)):\n if h1[i][2] > 0.75:\n count_200 = count_200 + 1\n print('confidence value:', h1[i][2], ' association rule:', h1[i]\n [1], '→', h1[i][0])\nprint('Total:', count_200)\nprint(\"\"\"\ni. 
soft-margin SVM with linear kernel\"\"\")\ni1 = e3[:, 1].copy()\ni2 = list(dict.fromkeys(i1))\ni3 = np.zeros((len(X), len(i2))).astype(int)\nfor i in range(len(X)):\n for j in range(len(i2)):\n if set(i2[j]).issubset(set(X[i])) == True:\n i3[i][j] = 1\n else:\n i3[i][j] = 0\n(data_train_lin_1, data_test_lin_1, data_train_label_lin_1,\n data_test_label_lin_1) = (train_test_split(i3, label, train_size=0.75,\n random_state=0, stratify=label))\nparameters_linear = [{'C': [0.5, 0.7, 0.9, 1.0, 1.5]}]\nmodel_linear = GridSearchCV(SVC(kernel='linear'), parameters_linear, cv=3).fit(\n data_train_lin_1, data_train_label_lin_1)\nprint('The best parameters: ', model_linear.best_params_)\npredicted_label_lin_1 = model_linear.predict(data_test_lin_1)\naccuracy_lin_1 = accuracy_score(data_test_label_lin_1, predicted_label_lin_1)\nprint('accurac:', accuracy_lin_1)\n(data_test_lin_2, data_train_lin_2, data_test_label_lin_2,\n data_train_label_lin_2) = (train_test_split(i3, label, train_size=0.25,\n random_state=0, stratify=label))\nmodel_linear = GridSearchCV(SVC(kernel='linear'), parameters_linear, cv=3).fit(\n data_train_lin_2, data_train_label_lin_2)\nprint('The best parameters: ', model_linear.best_params_)\npredicted_label_lin_2 = model_linear.predict(data_test_lin_2)\naccuracy_lin_2 = accuracy_score(data_test_label_lin_2, predicted_label_lin_2)\nprint('accurac:', accuracy_lin_2)\n(data_temp1_lin_3, data_temp2_lin_3, data_temp1_label_lin_3,\n data_temp2_label_lin_3) = (train_test_split(i3, label, train_size=0.375,\n random_state=0, stratify=label))\n(data_test_lin_3, data_temp3_lin_3, data_test_label_lin_3,\n data_temp3_label_lin_3) = (train_test_split(data_temp2_lin_3,\n data_temp2_label_lin_3, train_size=0.4, random_state=0, stratify=\n data_temp2_label_lin_3))\ndata_train_lin_3 = np.vstack((data_temp1_lin_3, data_temp3_lin_3))\ndata_train_label_lin_3 = np.hstack((data_temp1_label_lin_3,\n data_temp3_label_lin_3))\nmodel_linear = GridSearchCV(SVC(kernel='linear'), parameters_linear, cv=3).fit(\n data_train_lin_3, data_train_label_lin_3)\nprint('The best parameters: ', model_linear.best_params_)\npredicted_label_lin_3 = model_linear.predict(data_test_lin_3)\naccuracy_lin_3 = accuracy_score(data_test_label_lin_3, predicted_label_lin_3)\nprint('accurac:', accuracy_lin_3)\nscores_lin = np.array([accuracy_lin_1, accuracy_lin_2, accuracy_lin_3])\nprint('Average 3-fold classification accuracy(along with standard deviation):',\n scores_lin.mean(), '(+/-', scores_lin.std(), ')')\n",
"step-4": "import fim\nimport numpy as np\nfrom sklearn.model_selection import train_test_split, GridSearchCV\nfrom sklearn.svm import SVC\nfrom sklearn.metrics import accuracy_score\nf = open('house-votes-84.data', 'r')\nlines = f.readlines()\nX = []\nlabel = []\nfor line in lines:\n strpline = line.rstrip()\n arr = strpline.split(',')\n newline = []\n for i in range(len(arr)):\n if arr[i] == 'y':\n newline.append(i)\n if arr[0] == 'republican':\n newline.append(100)\n label.append(0)\n else:\n newline.append(200)\n label.append(1)\n X.append(newline)\nprint(\n 'a. Run the itemset mining algorithm with 20% support. How many frequent itemsets are there?'\n )\na = np.array(fim.eclat(X, supp=20))\nprint(len(a))\nb1 = fim.eclat(X, supp=20, report='a')\nb2 = np.array(b1)\nb3 = b2[b2[:, 1].argsort()][::-1]\nprint(\"\"\"\nb. Write top 10 itemsets (in terms of highest support value).\"\"\")\nfor i in range(10):\n print(b3[i])\nprint(\"\"\"\nc. How many frequent itemsets have 100 as part of itemsets?\"\"\")\nc1 = []\na = np.array(a)\nfor i in range(len(a)):\n if 100 in a[i][0]:\n c1.append(a[i].tolist())\nc2 = np.array(c1)\nc3 = c2[c2[:, 1].argsort()][::-1].tolist()\nprint(len(c3))\nprint(\"\"\"\nd. How many frequent itemsets have 200 as part of itemsets?\"\"\")\nd1 = []\nfor i in range(len(a)):\n if 200 in a[i][0]:\n d1.append(a[i].tolist())\nd2 = np.array(d1)\nd3 = d2[d2[:, 1].argsort()][::-1].tolist()\nprint(len(d3))\nprint(\n \"\"\"\ne. Write top 10 association rules (in terms of highest confidence value) where the rules head is 100.\"\"\"\n )\ne1 = fim.eclat(X, supp=20, target='r', report='c', conf=75.0001)\ne2 = np.array(e1)\ne3 = e2[e2[:, 2].argsort()][::-1]\ne4 = []\nfor i in range(len(e3)):\n if e3[i][0] == 100:\n e4.append(e3[i].tolist())\ne5 = np.array(e4)\nfor i in range(10):\n print('confidence value:', e5[i][2], ' association rule:', e5[i][1],\n '→', e5[i][0])\nprint(\n \"\"\"\nf. How many rules with head 100 are there for which the confidence value is more than 75%? List them.\"\"\"\n )\nf1 = e5.copy()\ncount_100 = 0\nfor i in range(len(f1)):\n if f1[i][2] > 0.75:\n count_100 = count_100 + 1\n print('confidence value:', f1[i][2], ' association rule:', f1[i]\n [1], '→', f1[i][0])\nprint('Total:', count_100)\nprint(\n \"\"\"\ng. Write top 10 association rules (in terms of highest confidence value) where the rules head is 200.\"\"\"\n )\ng2 = np.array(e1)\ng3 = g2[g2[:, 2].argsort()][::-1]\ng4 = []\nfor i in range(len(g3)):\n if g3[i][0] == 200:\n g4.append(g3[i].tolist())\ng5 = np.array(g4)\nfor i in range(10):\n print('confidence value:', g5[i][2], ' association rule:', g5[i][1],\n '→', g5[i][0])\nprint(\n \"\"\"\nh. How many rules with head 200 are there for which the confidence value is more than 75%? List them.\"\"\"\n )\nh1 = g5.copy()\ncount_200 = 0\nfor i in range(len(h1)):\n if h1[i][2] > 0.75:\n count_200 = count_200 + 1\n print('confidence value:', h1[i][2], ' association rule:', h1[i]\n [1], '→', h1[i][0])\nprint('Total:', count_200)\nprint(\"\"\"\ni. 
soft-margin SVM with linear kernel\"\"\")\ni1 = e3[:, 1].copy()\ni2 = list(dict.fromkeys(i1))\ni3 = np.zeros((len(X), len(i2))).astype(int)\nfor i in range(len(X)):\n for j in range(len(i2)):\n if set(i2[j]).issubset(set(X[i])) == True:\n i3[i][j] = 1\n else:\n i3[i][j] = 0\n(data_train_lin_1, data_test_lin_1, data_train_label_lin_1,\n data_test_label_lin_1) = (train_test_split(i3, label, train_size=0.75,\n random_state=0, stratify=label))\nparameters_linear = [{'C': [0.5, 0.7, 0.9, 1.0, 1.5]}]\nmodel_linear = GridSearchCV(SVC(kernel='linear'), parameters_linear, cv=3).fit(\n data_train_lin_1, data_train_label_lin_1)\nprint('The best parameters: ', model_linear.best_params_)\npredicted_label_lin_1 = model_linear.predict(data_test_lin_1)\naccuracy_lin_1 = accuracy_score(data_test_label_lin_1, predicted_label_lin_1)\nprint('accurac:', accuracy_lin_1)\n(data_test_lin_2, data_train_lin_2, data_test_label_lin_2,\n data_train_label_lin_2) = (train_test_split(i3, label, train_size=0.25,\n random_state=0, stratify=label))\nmodel_linear = GridSearchCV(SVC(kernel='linear'), parameters_linear, cv=3).fit(\n data_train_lin_2, data_train_label_lin_2)\nprint('The best parameters: ', model_linear.best_params_)\npredicted_label_lin_2 = model_linear.predict(data_test_lin_2)\naccuracy_lin_2 = accuracy_score(data_test_label_lin_2, predicted_label_lin_2)\nprint('accurac:', accuracy_lin_2)\n(data_temp1_lin_3, data_temp2_lin_3, data_temp1_label_lin_3,\n data_temp2_label_lin_3) = (train_test_split(i3, label, train_size=0.375,\n random_state=0, stratify=label))\n(data_test_lin_3, data_temp3_lin_3, data_test_label_lin_3,\n data_temp3_label_lin_3) = (train_test_split(data_temp2_lin_3,\n data_temp2_label_lin_3, train_size=0.4, random_state=0, stratify=\n data_temp2_label_lin_3))\ndata_train_lin_3 = np.vstack((data_temp1_lin_3, data_temp3_lin_3))\ndata_train_label_lin_3 = np.hstack((data_temp1_label_lin_3,\n data_temp3_label_lin_3))\nmodel_linear = GridSearchCV(SVC(kernel='linear'), parameters_linear, cv=3).fit(\n data_train_lin_3, data_train_label_lin_3)\nprint('The best parameters: ', model_linear.best_params_)\npredicted_label_lin_3 = model_linear.predict(data_test_lin_3)\naccuracy_lin_3 = accuracy_score(data_test_label_lin_3, predicted_label_lin_3)\nprint('accurac:', accuracy_lin_3)\nscores_lin = np.array([accuracy_lin_1, accuracy_lin_2, accuracy_lin_3])\nprint('Average 3-fold classification accuracy(along with standard deviation):',\n scores_lin.mean(), '(+/-', scores_lin.std(), ')')\n",
"step-5": "# -*- coding: utf-8 -*-\r\n###########################\r\n# CSCI 573 Data Mining - Eclat and Linear Kernel SVM\r\n# Author: Chu-An Tsai\r\n# 12/14/2019\r\n###########################\r\n\r\nimport fim \r\nimport numpy as np\r\nfrom sklearn.model_selection import train_test_split, GridSearchCV\r\nfrom sklearn.svm import SVC\r\nfrom sklearn.metrics import accuracy_score\r\n\r\nf = open('house-votes-84.data','r')\r\nlines = f.readlines()\r\nX = []\r\nlabel = []\r\nfor line in lines:\r\n strpline = line.rstrip()\r\n arr = strpline.split(',')\r\n newline = [];\r\n for i in range(len(arr)):\r\n if arr[i] == 'y':\r\n newline.append(i)\r\n if arr[0] == 'republican':\r\n newline.append(100)\r\n label.append(0)\r\n else:\r\n newline.append(200)\r\n label.append(1)\r\n #print(*newline, sep=',')\r\n X.append(newline)\r\n\r\n################################# a.\r\nprint('a. Run the itemset mining algorithm with 20% support. How many frequent itemsets are there?')\r\na = np.array(fim.eclat(X, supp=20))\r\nprint(len(a)) \r\n\r\n################################# b.\r\nb1 = fim.eclat(X, supp=20, report='a')\r\nb2 = np.array(b1)\r\nb3 = b2[b2[:,1].argsort()][::-1]\r\nprint('\\nb. Write top 10 itemsets (in terms of highest support value).')\r\nfor i in range(10):\r\n print(b3[i])\r\n\r\n################################# c.\r\nprint('\\nc. How many frequent itemsets have 100 as part of itemsets?')\r\nc1 = []\r\na=np.array(a)\r\nfor i in range(len(a)):\r\n if 100 in a[i][0]:\r\n c1.append(a[i].tolist())\r\nc2 = np.array(c1)\r\nc3 = c2[c2[:,1].argsort()][::-1].tolist()\r\nprint(len(c3)) \r\n\r\n################################## d.\r\nprint('\\nd. How many frequent itemsets have 200 as part of itemsets?')\r\nd1 = []\r\nfor i in range(len(a)):\r\n if 200 in a[i][0]:\r\n d1.append(a[i].tolist())\r\nd2 = np.array(d1)\r\nd3 = d2[d2[:,1].argsort()][::-1].tolist()\r\nprint(len(d3)) \r\n \r\n################################## e.\r\nprint('\\ne. Write top 10 association rules (in terms of highest confidence value) where the rule''s head is 100.')\r\ne1 = fim.eclat(X, supp=20, target='r', report='c', conf=75.0001)\r\ne2 = np.array(e1)\r\ne3 = e2[e2[:,2].argsort()][::-1]\r\ne4 = []\r\nfor i in range(len(e3)):\r\n if e3[i][0] == 100:\r\n e4.append(e3[i].tolist())\r\ne5 = np.array(e4)\r\nfor i in range(10): \r\n print('confidence value:',e5[i][2],' association rule:', e5[i][1], '→', e5[i][0],)\r\n\r\n################################## f.\r\nprint('\\nf. How many rules with head 100 are there for which the confidence value is more than 75%? List them.')\r\nf1 = e5.copy()\r\ncount_100 = 0\r\nfor i in range(len(f1)):\r\n if (f1[i][2]) > 0.75:\r\n count_100 = count_100 + 1\r\n print('confidence value:', f1[i][2], ' association rule:', f1[i][1], '→', f1[i][0],)\r\nprint('Total:',count_100)\r\n\r\n################################## g.\r\nprint('\\ng. Write top 10 association rules (in terms of highest confidence value) where the rule''s head is 200.')\r\ng2 = np.array(e1)\r\ng3 = g2[g2[:,2].argsort()][::-1]\r\ng4 = []\r\nfor i in range(len(g3)):\r\n if g3[i][0] == 200:\r\n g4.append(g3[i].tolist())\r\ng5 = np.array(g4)\r\nfor i in range(10): \r\n print('confidence value:',g5[i][2],' association rule:', g5[i][1], '→', g5[i][0],)\r\n\r\n################################## h.\r\nprint('\\nh. How many rules with head 200 are there for which the confidence value is more than 75%? 
List them.')\r\nh1 = g5.copy()\r\ncount_200 = 0\r\nfor i in range(len(h1)):\r\n if (h1[i][2]) > 0.75:\r\n count_200 = count_200 + 1\r\n print('confidence value:', h1[i][2], ' association rule:', h1[i][1], '→', h1[i][0],)\r\nprint('Total:',count_200)\r\n\r\n################################### i.\r\nprint('\\ni. soft-margin SVM with linear kernel')\r\ni1 = e3[:,1].copy()\r\ni2 = list(dict.fromkeys(i1))\r\ni3 = np.zeros((len(X),len(i2))).astype(int)\r\n\r\nfor i in range(len(X)):\r\n for j in range(len(i2)):\r\n if (set(i2[j]).issubset(set(X[i]))) == True:\r\n i3[i][j] = 1\r\n else:\r\n i3[i][j] = 0\r\n\r\n# Training set = first 75% data, Tuning set = 25% from training set, Test set = last 25% data \r\n\r\ndata_train_lin_1, data_test_lin_1, data_train_label_lin_1, data_test_label_lin_1 = train_test_split(i3, label, train_size=0.75, random_state = 0, stratify = label)\r\n\r\n#C = np.arange(0.01, 2, 0.01)\r\n#parameters_linear = [{'C':C}]\r\nparameters_linear = [{'C':[0.5, 0.7, 0.9, 1.0, 1.5]}]\r\n\r\nmodel_linear = GridSearchCV(SVC(kernel='linear'), parameters_linear, cv=3).fit(data_train_lin_1, data_train_label_lin_1)\r\nprint('The best parameters: ', model_linear.best_params_)\r\n#print(\"Scores for crossvalidation:\")\r\n#for mean, params in zip(model_linear.cv_results_['mean_test_score'], model_linear.cv_results_['params']):\r\n #print(\"Accuracy: %0.6f for %r\" % (mean, params))\r\npredicted_label_lin_1 = model_linear.predict(data_test_lin_1)\r\naccuracy_lin_1 = accuracy_score(data_test_label_lin_1, predicted_label_lin_1)\r\nprint('accurac:',accuracy_lin_1)\r\n\r\n# Training set = last 75% data, Tuning set = 25% from training set, Test set = first 25% data \r\n\r\ndata_test_lin_2, data_train_lin_2, data_test_label_lin_2, data_train_label_lin_2 = train_test_split(i3, label, train_size=0.25, random_state = 0, stratify = label)\r\n\r\nmodel_linear = GridSearchCV(SVC(kernel='linear'), parameters_linear, cv=3).fit(data_train_lin_2, data_train_label_lin_2)\r\nprint('The best parameters: ', model_linear.best_params_)\r\n#print(\"Scores for crossvalidation:\")\r\n#for mean, params in zip(model_linear.cv_results_['mean_test_score'], model_linear.cv_results_['params']):\r\n #print(\"Accuracy: %0.6f for %r\" % (mean, params))\r\npredicted_label_lin_2 = model_linear.predict(data_test_lin_2)\r\naccuracy_lin_2 = accuracy_score(data_test_label_lin_2, predicted_label_lin_2)\r\nprint('accurac:',accuracy_lin_2)\r\n\r\n# Training set = first 37.5% and last 37.5%, Tuning set = 25% from training set, Test set = first 25% data \r\n\r\ndata_temp1_lin_3, data_temp2_lin_3, data_temp1_label_lin_3, data_temp2_label_lin_3 = train_test_split(i3, label, train_size=0.375, random_state = 0, stratify = label)\r\ndata_test_lin_3, data_temp3_lin_3, data_test_label_lin_3, data_temp3_label_lin_3 = train_test_split(data_temp2_lin_3, data_temp2_label_lin_3, train_size=0.4, random_state = 0, stratify = data_temp2_label_lin_3)\r\ndata_train_lin_3 = np.vstack((data_temp1_lin_3, data_temp3_lin_3))\r\ndata_train_label_lin_3 = np.hstack((data_temp1_label_lin_3, data_temp3_label_lin_3))\r\n\r\nmodel_linear = GridSearchCV(SVC(kernel='linear'), parameters_linear, cv=3).fit(data_train_lin_3, data_train_label_lin_3)\r\nprint('The best parameters: ', model_linear.best_params_)\r\n#print(\"Scores for crossvalidation:\")\r\n#for mean, params in zip(model_linear.cv_results_['mean_test_score'], model_linear.cv_results_['params']):\r\n #print(\"Accuracy: %0.6f for %r\" % (mean, params))\r\npredicted_label_lin_3 = 
model_linear.predict(data_test_lin_3)\r\naccuracy_lin_3 = accuracy_score(data_test_label_lin_3, predicted_label_lin_3)\r\nprint('accurac:',accuracy_lin_3)\r\n\r\nscores_lin = np.array([accuracy_lin_1, accuracy_lin_2, accuracy_lin_3])\r\nprint('Average 3-fold classification accuracy(along with standard deviation):', scores_lin.mean(), '(+/-',scores_lin.std(),')')\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# -*- coding: utf-8 -*-
import argparse
import pickle
def str2bool(v):
return v.lower() in ('true', '1')
arg_lists = []
parser = argparse.ArgumentParser()
def add_argument_group(name):
arg = parser.add_argument_group(name)
arg_lists.append(arg)
return arg
# Network
net_arg = add_argument_group('Network')
net_arg.add_argument('--num_steps', type=int, default=150, help='')
net_arg.add_argument('--cell_size', type=int, default=700, help='')
net_arg.add_argument('--hyper_size', type=int, default=400, help='')
net_arg.add_argument('--embed_size', type=int, default=128, help='')
net_arg.add_argument('--hidden_size', type=int, default=256, help='')
net_arg.add_argument('--num_layers', type=int, default=2, help='')
net_arg.add_argument('--fast_layers', type=int, default=2, help='')
net_arg.add_argument('--zoneout_c', type=float, default=0.5, help='')
net_arg.add_argument('--zoneout_h', type=float, default=0.9, help='')
net_arg.add_argument('--keep_prob', type=float, default=0.65, help='')
net_arg.add_argument('--input_dim', type=int, default=300, help='')
net_arg.add_argument('--num_glimpse', type=int, default=1, help='')
net_arg.add_argument('--use_terminal_symbol', type=str2bool, default=True, help='Not implemented yet')
# Data
data_arg = add_argument_group('Data')
data_arg.add_argument('--task', type=str, default='ptb')
data_arg.add_argument('--batch_size', type=int, default=128)
data_arg.add_argument('--vocab_size', type=int, default=50)
data_arg.add_argument('--input_size', type=int, default=300)
data_arg.add_argument('--min_data_length', type=int, default=5)
data_arg.add_argument('--max_data_length', type=int, default=80)
data_arg.add_argument('--train_num', type=int, default=1000000)
data_arg.add_argument('--valid_num', type=int, default=1000)
data_arg.add_argument('--test_num', type=int, default=1000)
# Training / test parameters
train_arg = add_argument_group('Training')
train_arg.add_argument('--is_train', type=str2bool, default=True, help='')
train_arg.add_argument('--optimizer', type=str, default='rmsprop', help='')
train_arg.add_argument('--max_epoch', type=int, default=200, help='')
train_arg.add_argument('--max_max_epoch', type=int, default=200, help='')
train_arg.add_argument('--max_step', type=int, default=1000000, help='')
train_arg.add_argument('--init_scale', type=float, default=0.002, help='')
train_arg.add_argument('--lr_start', type=float, default=0.01, help='')
train_arg.add_argument('--lr_decay_step', type=int, default=5000, help='')
train_arg.add_argument('--lr_decay_rate', type=float, default=0.1, help='')
train_arg.add_argument('--max_grad_norm', type=float, default=1.0, help='')
train_arg.add_argument('--checkpoint_secs', type=int, default=300, help='')
# Misc
misc_arg = add_argument_group('Misc')
misc_arg.add_argument('--log_step', type=int, default=2, help='')
misc_arg.add_argument('--num_log_samples', type=int, default=3, help='')
misc_arg.add_argument('--log_level', type=str, default='INFO', choices=['INFO', 'DEBUG', 'WARN'], help='')
misc_arg.add_argument('--log_dir', type=str, default='logs')
misc_arg.add_argument('--data_dir', type=str, default='data')
misc_arg.add_argument('--output_dir', type=str, default='outputs')
misc_arg.add_argument('--data_path', type=str, default='/Ujjawal/fast-slow-lstm/data' )
misc_arg.add_argument('--debug', type=str2bool, default=False)
misc_arg.add_argument('--gpu_memory_fraction', type=float, default=1.0)
misc_arg.add_argument('--random_seed', type=int, default=123, help='')
def get_config():
config, unparsed = parser.parse_known_args()
return config
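A minimal consumer of this module, assuming it is saved as config.py, would be:
from config import get_config

config = get_config()
# Each flag defined above becomes an attribute on the parsed namespace.
print(config.batch_size, config.cell_size, config.lr_start)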
|
normal
|
{
"blob_id": "dfaea1687238d3d09fee072689cfdea392bc78f9",
"index": 8967,
"step-1": "<mask token>\n\n\ndef str2bool(v):\n return v.lower() in ('true', '1')\n\n\n<mask token>\n\n\ndef add_argument_group(name):\n arg = parser.add_argument_group(name)\n arg_lists.append(arg)\n return arg\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef str2bool(v):\n return v.lower() in ('true', '1')\n\n\n<mask token>\n\n\ndef add_argument_group(name):\n arg = parser.add_argument_group(name)\n arg_lists.append(arg)\n return arg\n\n\n<mask token>\n\n\ndef get_config():\n config, unparsed = parser.parse_known_args()\n return config\n",
"step-3": "<mask token>\n\n\ndef str2bool(v):\n return v.lower() in ('true', '1')\n\n\narg_lists = []\nparser = argparse.ArgumentParser()\n\n\ndef add_argument_group(name):\n arg = parser.add_argument_group(name)\n arg_lists.append(arg)\n return arg\n\n\nnet_arg = add_argument_group('Network')\nnet_arg.add_argument('--num_steps', type=int, default=150, help='')\nnet_arg.add_argument('--cell_size', type=int, default=700, help='')\nnet_arg.add_argument('--hyper_size', type=int, default=400, help='')\nnet_arg.add_argument('--embed_size', type=int, default=128, help='')\nnet_arg.add_argument('--hidden_size', type=int, default=256, help='')\nnet_arg.add_argument('--num_layers', type=int, default=2, help='')\nnet_arg.add_argument('--fast_layers', type=int, default=2, help='')\nnet_arg.add_argument('--zoneout_c', type=float, default=0.5, help='')\nnet_arg.add_argument('--zoneout_h', type=float, default=0.9, help='')\nnet_arg.add_argument('--keep_prob', type=float, default=0.65, help='')\nnet_arg.add_argument('--input_dim', type=int, default=300, help='')\nnet_arg.add_argument('--num_glimpse', type=int, default=1, help='')\nnet_arg.add_argument('--use_terminal_symbol', type=str2bool, default=True,\n help='Not implemented yet')\ndata_arg = add_argument_group('Data')\ndata_arg.add_argument('--task', type=str, default='ptb')\ndata_arg.add_argument('--batch_size', type=int, default=128)\ndata_arg.add_argument('--vocab_size', type=int, default=50)\ndata_arg.add_argument('--input_size', type=int, default=300)\ndata_arg.add_argument('--min_data_length', type=int, default=5)\ndata_arg.add_argument('--max_data_length', type=int, default=80)\ndata_arg.add_argument('--train_num', type=int, default=1000000)\ndata_arg.add_argument('--valid_num', type=int, default=1000)\ndata_arg.add_argument('--test_num', type=int, default=1000)\ntrain_arg = add_argument_group('Training')\ntrain_arg.add_argument('--is_train', type=str2bool, default=True, help='')\ntrain_arg.add_argument('--optimizer', type=str, default='rmsprop', help='')\ntrain_arg.add_argument('--max_epoch', type=int, default=200, help='')\ntrain_arg.add_argument('--max_max_epoch', type=int, default=200, help='')\ntrain_arg.add_argument('--max_step', type=int, default=1000000, help='')\ntrain_arg.add_argument('--init_scale', type=float, default=0.002, help='')\ntrain_arg.add_argument('--lr_start', type=float, default=0.01, help='')\ntrain_arg.add_argument('--lr_decay_step', type=int, default=5000, help='')\ntrain_arg.add_argument('--lr_decay_rate', type=float, default=0.1, help='')\ntrain_arg.add_argument('--max_grad_norm', type=float, default=1.0, help='')\ntrain_arg.add_argument('--checkpoint_secs', type=int, default=300, help='')\nmisc_arg = add_argument_group('Misc')\nmisc_arg.add_argument('--log_step', type=int, default=2, help='')\nmisc_arg.add_argument('--num_log_samples', type=int, default=3, help='')\nmisc_arg.add_argument('--log_level', type=str, default='INFO', choices=[\n 'INFO', 'DEBUG', 'WARN'], help='')\nmisc_arg.add_argument('--log_dir', type=str, default='logs')\nmisc_arg.add_argument('--data_dir', type=str, default='data')\nmisc_arg.add_argument('--output_dir', type=str, default='outputs')\nmisc_arg.add_argument('--data_path', type=str, default=\n '/Ujjawal/fast-slow-lstm/data')\nmisc_arg.add_argument('--debug', type=str2bool, default=False)\nmisc_arg.add_argument('--gpu_memory_fraction', type=float, default=1.0)\nmisc_arg.add_argument('--random_seed', type=int, default=123, help='')\n\n\ndef get_config():\n config, unparsed = 
parser.parse_known_args()\n return config\n",
"step-4": "import argparse\nimport pickle\n\n\ndef str2bool(v):\n return v.lower() in ('true', '1')\n\n\narg_lists = []\nparser = argparse.ArgumentParser()\n\n\ndef add_argument_group(name):\n arg = parser.add_argument_group(name)\n arg_lists.append(arg)\n return arg\n\n\nnet_arg = add_argument_group('Network')\nnet_arg.add_argument('--num_steps', type=int, default=150, help='')\nnet_arg.add_argument('--cell_size', type=int, default=700, help='')\nnet_arg.add_argument('--hyper_size', type=int, default=400, help='')\nnet_arg.add_argument('--embed_size', type=int, default=128, help='')\nnet_arg.add_argument('--hidden_size', type=int, default=256, help='')\nnet_arg.add_argument('--num_layers', type=int, default=2, help='')\nnet_arg.add_argument('--fast_layers', type=int, default=2, help='')\nnet_arg.add_argument('--zoneout_c', type=float, default=0.5, help='')\nnet_arg.add_argument('--zoneout_h', type=float, default=0.9, help='')\nnet_arg.add_argument('--keep_prob', type=float, default=0.65, help='')\nnet_arg.add_argument('--input_dim', type=int, default=300, help='')\nnet_arg.add_argument('--num_glimpse', type=int, default=1, help='')\nnet_arg.add_argument('--use_terminal_symbol', type=str2bool, default=True,\n help='Not implemented yet')\ndata_arg = add_argument_group('Data')\ndata_arg.add_argument('--task', type=str, default='ptb')\ndata_arg.add_argument('--batch_size', type=int, default=128)\ndata_arg.add_argument('--vocab_size', type=int, default=50)\ndata_arg.add_argument('--input_size', type=int, default=300)\ndata_arg.add_argument('--min_data_length', type=int, default=5)\ndata_arg.add_argument('--max_data_length', type=int, default=80)\ndata_arg.add_argument('--train_num', type=int, default=1000000)\ndata_arg.add_argument('--valid_num', type=int, default=1000)\ndata_arg.add_argument('--test_num', type=int, default=1000)\ntrain_arg = add_argument_group('Training')\ntrain_arg.add_argument('--is_train', type=str2bool, default=True, help='')\ntrain_arg.add_argument('--optimizer', type=str, default='rmsprop', help='')\ntrain_arg.add_argument('--max_epoch', type=int, default=200, help='')\ntrain_arg.add_argument('--max_max_epoch', type=int, default=200, help='')\ntrain_arg.add_argument('--max_step', type=int, default=1000000, help='')\ntrain_arg.add_argument('--init_scale', type=float, default=0.002, help='')\ntrain_arg.add_argument('--lr_start', type=float, default=0.01, help='')\ntrain_arg.add_argument('--lr_decay_step', type=int, default=5000, help='')\ntrain_arg.add_argument('--lr_decay_rate', type=float, default=0.1, help='')\ntrain_arg.add_argument('--max_grad_norm', type=float, default=1.0, help='')\ntrain_arg.add_argument('--checkpoint_secs', type=int, default=300, help='')\nmisc_arg = add_argument_group('Misc')\nmisc_arg.add_argument('--log_step', type=int, default=2, help='')\nmisc_arg.add_argument('--num_log_samples', type=int, default=3, help='')\nmisc_arg.add_argument('--log_level', type=str, default='INFO', choices=[\n 'INFO', 'DEBUG', 'WARN'], help='')\nmisc_arg.add_argument('--log_dir', type=str, default='logs')\nmisc_arg.add_argument('--data_dir', type=str, default='data')\nmisc_arg.add_argument('--output_dir', type=str, default='outputs')\nmisc_arg.add_argument('--data_path', type=str, default=\n '/Ujjawal/fast-slow-lstm/data')\nmisc_arg.add_argument('--debug', type=str2bool, default=False)\nmisc_arg.add_argument('--gpu_memory_fraction', type=float, default=1.0)\nmisc_arg.add_argument('--random_seed', type=int, default=123, help='')\n\n\ndef get_config():\n config, 
unparsed = parser.parse_known_args()\n return config\n",
"step-5": "#-*- coding: utf-8 -*-\nimport argparse\nimport pickle\n\ndef str2bool(v):\n return v.lower() in ('true', '1')\n\n\narg_lists = []\nparser = argparse.ArgumentParser()\n\ndef add_argument_group(name):\n arg = parser.add_argument_group(name)\n arg_lists.append(arg)\n return arg\n\n\n# Network\nnet_arg = add_argument_group('Network')\nnet_arg.add_argument('--num_steps', type=int, default=150, help='')\nnet_arg.add_argument('--cell_size', type=int, default=700, help='')\nnet_arg.add_argument('--hyper_size', type=int, default=400, help='')\nnet_arg.add_argument('--embed_size', type=int, default=128, help='')\nnet_arg.add_argument('--hidden_size', type=int, default=256, help='')\nnet_arg.add_argument('--num_layers', type=int, default=2, help='')\nnet_arg.add_argument('--fast_layers', type=int, default=2, help='')\nnet_arg.add_argument('--zoneout_c', type=float, default=0.5, help='')\nnet_arg.add_argument('--zoneout_h', type=float, default=0.9, help='')\nnet_arg.add_argument('--keep_prob', type=float, default=0.65, help='')\nnet_arg.add_argument('--input_dim', type=int, default=300, help='')\nnet_arg.add_argument('--num_glimpse', type=int, default=1, help='')\nnet_arg.add_argument('--use_terminal_symbol', type=str2bool, default=True, help='Not implemented yet')\n\n# Data\ndata_arg = add_argument_group('Data')\ndata_arg.add_argument('--task', type=str, default='ptb')\ndata_arg.add_argument('--batch_size', type=int, default=128)\ndata_arg.add_argument('--vocab_size', type=int, default=50)\ndata_arg.add_argument('--input_size', type=int, default=300)\ndata_arg.add_argument('--min_data_length', type=int, default=5)\ndata_arg.add_argument('--max_data_length', type=int, default=80)\ndata_arg.add_argument('--train_num', type=int, default=1000000)\ndata_arg.add_argument('--valid_num', type=int, default=1000)\ndata_arg.add_argument('--test_num', type=int, default=1000)\n\n# Training / test parameters\ntrain_arg = add_argument_group('Training')\ntrain_arg.add_argument('--is_train', type=str2bool, default=True, help='')\ntrain_arg.add_argument('--optimizer', type=str, default='rmsprop', help='')\n\ntrain_arg.add_argument('--max_epoch', type=int, default=200, help='')\ntrain_arg.add_argument('--max_max_epoch', type=int, default=200, help='')\n\n\ntrain_arg.add_argument('--max_step', type=int, default=1000000, help='')\ntrain_arg.add_argument('--init_scale', type=float, default=0.002, help='')\ntrain_arg.add_argument('--lr_start', type=float, default=0.01, help='')\ntrain_arg.add_argument('--lr_decay_step', type=int, default=5000, help='')\ntrain_arg.add_argument('--lr_decay_rate', type=float, default= 0.1, help='')\ntrain_arg.add_argument('--max_grad_norm', type=float, default=1.0, help='')\ntrain_arg.add_argument('--checkpoint_secs', type=int, default=300, help='')\n\n# Misc\nmisc_arg = add_argument_group('Misc')\nmisc_arg.add_argument('--log_step', type=int, default=2, help='')\nmisc_arg.add_argument('--num_log_samples', type=int, default=3, help='')\nmisc_arg.add_argument('--log_level', type=str, default='INFO', choices=['INFO', 'DEBUG', 'WARN'], help='')\nmisc_arg.add_argument('--log_dir', type=str, default='logs')\nmisc_arg.add_argument('--data_dir', type=str, default='data')\nmisc_arg.add_argument('--output_dir', type=str, default='outputs')\nmisc_arg.add_argument('--data_path', type=str, default='/Ujjawal/fast-slow-lstm/data' )\nmisc_arg.add_argument('--debug', type=str2bool, default=False)\nmisc_arg.add_argument('--gpu_memory_fraction', type=float, 
default=1.0)\nmisc_arg.add_argument('--random_seed', type=int, default=123, help='')\n\ndef get_config():\n config, unparsed = parser.parse_known_args()\n return config\n\n",
"step-ids": [
2,
3,
5,
6,
7
]
}
|
[
2,
3,
5,
6,
7
] |
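A minimal usage sketch for the argparse pattern in the record above (my addition; the module name `config` is an assumption, the flag names come from the record itself):

from config import get_config

config = get_config()
print(config.task, config.batch_size, config.lr_start)   # ptb 128 0.01 by default
# parse_known_args() means unrecognized CLI flags are ignored rather than fatal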
#!ipython3
pi_f = 0.1415926
pi = []
for i in range(10):
pi.append(str(pi_f * i*16)[0])
print(pi)
def convertBase(digits, baseA, baseB, precisionB):
    # placeholder stub; a working version (toBase) is defined further down
    raise NotImplementedError
#0.56 b8 to b10
#(1/base) ^ (i+1) *x
# to10('56')  # to10 was never defined; see toDec further down
test = list(str(56))
test
# 27 9 3
#    33
0.3212 * 3
4*1.5
0.3212* 4/6
3*3**-1
2*3**-2
1*3**-3
2*3**-4
# 2*10
# 16+4 = 0x14
# 0x16 = 16 + 6 = 22
# 0x22 / 0xa = 2 r 2
16*2+2
#34/10 = 3 r 4
30%16
#14
# 1*16 + 14
# 14 = 0xE
# 1*16+14 = 0x1E
0x3/0xA
# 3/10 = 0.3
# 3/10 = 0 r 3
# Keep dividing by the new base until an indivisible remainder is left.
# Each division produces a whole-number quotient;
# that quotient is the next number, which is divided again.
# to base-3
0x2BA
16**3
#schema
# 4096 256 16 1
#hier nur 256 und weniger von interesse
2*256 + 0xB*16 + 0xA*1
11*16
# 512 + 10 + 176 = 698
# 0x2BA = 2*256 + 0xB*16 + 0xA*1 = 698
698/3
0x2BA%0x03
0x2B8/0x03
232/16
16*14+8
0xe8%3
0xe7/3
77%3
75/3
25%3
24/3
8%3
6/3
2%3
0/3
# mod's above order:
# 212122
# reversed, true order:
# 221212
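# Hedged sketch (added, not part of the original trace): the repeated
# division described above, as a function. Remainders come out least
# significant first, hence the reversal.
def to_base_digits(n, base):
    digits = []
    while n > 0:
        digits.append(n % base)   # next remainder = next digit
        n //= base
    return digits[::-1] or [0]

to_base_digits(698, 3)   # [2, 2, 1, 2, 1, 2] -> 221212, matching the trace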
# base-8 to base-10
0o72
0o72%10
0o62/10
0o5%10
0o0/10
# 0o0.56  (octal fractions are not valid Python literals)
0o12
56%12
48/12
4%12
0/12
0.5/0.12
5*8**-1
6*8**-2
7*8**1
2*8**0
# 0o56 to 0x...
0.625/16
# = 0.0390625
import math
def runMult(fraction, baseB=16):
output = []
return mult(fraction, baseB, output)
def mult(fraction, baseB, output):
    '''
    Repeatedly multiply the fractional part by baseB,
    collecting the integer parts as the digits of output.
    '''
    prod = fraction * float(baseB)
    print("prod: ", prod)
    int_prod = int(math.floor(prod))
    # append every digit, including 0, so place values stay aligned
    output.append(int_prod)
    radix_right = prod - int_prod
    print("radix_right: ", radix_right)
    if radix_right == 0.0:
        print("output: ", output)
        return output
    else:
        return mult(radix_right, baseB, output)
runMult(0.5)
runMult(0.56)
runMult(0.71875, 8)
runMult(0.1415926535)
p = math.pi-3
p
(((((((((((((((((((((((((p*16)-2)*16)-4)*16)-3)*16)-15)*16)-6)*16)-10)*16)-8)*16)-8)*16)-8)*16)-5)*16)-10)*16)-3)*16)
0.56
d = 5*8**-1 + 6*8**-2
((((d*16)-11)*16)-8)
# 11 = b
# 8 = 8
# 0o0.56 == 0x0.b8
#b16 to b26
# 0x0.243f6a8885a3
# 0.3HIGBEBOHK
def toDec(digits, baseA):
'''
takes fractional part as list
Example:
0.56 = [5,6]
toDec(56, 8)
out: 0.71875
'''
# digit_list = list(str(digits))
digit_list = digits
dec_list = []
# print(digit_list)
for i, d in enumerate(digit_list):
dec_d = float(d)*baseA**-(i+1)
dec_list.append(dec_d)
# print(dec_list)
output = 0.0
for i in dec_list:
output += i
return output
toDec([5,6], 8)
toDec([2,4,3,15,6,10,8,8,8,5,10,3], 16)
def toBase(digits, baseA, baseB):
    # fractional digits in baseA -> decimal float -> fractional digits in baseB
    dec = toDec(digits, baseA)
    return runMult(dec, baseB)
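# quick round-trip check of the completed toBase (added):
toBase([5, 6], 8, 16)   # -> [11, 8], i.e. 0o0.56 == 0x0.b8 as derived above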
0.3212 *3
0.9636
1.4040
0.404 *3
2.020
0.02 *3
0.1
0.1 *3
0.3 *3
1.3
1.3 *3
# 120011111...
# CORRECT !!! ################################################################
0.56 #base-8 multiplication, 10b10 = 12b8
0.56*12
#  50 60
5.6
0.75
6.2
7.14
0.14 * 12
#    0.04 *10 / 8
#    40 -> 50
#    0.50
#    0.1 *10 / 8
#    10 -> 12
#    1.2
#    1.2 + 0.5
#    1.7
0.7 * 12
7.0
10.6
0.6 *12
7.4
0.4*12
5.0
0.71875
|
normal
|
{
"blob_id": "cffc64970cb82072e5fb949f62e9778942b2be96",
"index": 8269,
"step-1": "#!ipython3\n\npi_f = 0.1415926\npi = []\nfor i in range(10):\n pi.append(str(pi_f * i*16)[0])\n\nprint(pi)\n\n\ndef convertBase(digits, baseA, baseB, precisionB):\n return output\n\n#0.56 b8 to b10\n#(1/base) ^ (i+1) *x\n\n\nto10('56')\n\ntest = list(str(56))\ntest\n\n27 9 3\n 33\n\n\n0.3212 * 3\n4*1.5\n0.3212* 4/6\n\n3*3**-1\n2*3**-2\n1*3**-3\n2*3**-4\n\n# 2*10 \n# 16+4 = 0x14\n# 0x16 = 16 + 6 = 22\n# 0x22 / 0xa = 2 r 2 \n16*2+2\n#34/10 = 3 r 4\n30%16\n#14\n# 1*16 + 14\n# 14 = 0xE\n# 1*16+14 = 0x1E\n\n0x3/0xA\n# 3/10 = 0.3\n# 3/10 = 0 r 3\n\n# Solange durch die neue basis teilen, bis ein unteilbarer rest übrig ist.\n# Diese Teilung bringt ganze Zahlen bei der Division hervor.\n# Diese ist die nächste Zahl, welche widerum geteilt wird.\n\n# to base-3\n0x2BA\n\n16**3\n#schema\n4096 256 16 1\n#hier nur 256 und weniger von interesse\n2*256 + 0xB*16 + 0xA*1\n11*16\n512+ 10+ 176 = 698\n0x2BA = 2*256 + B*16 + A*1 = 698\n\n698/3\n0x2BA%0x03\n0x2B8/0x03\n232/16\n16*14+8\n0xe8%3\n0xe7/3\n77%3\n75/3\n25%3\n24/3\n8%3\n6/3\n2%3\n0/3\n\n# mod's above order:\n# 212122\n# reversed, true order:\n# 221212\n\n# base-8 to base-10\n0o72\n\n0o72%10\n0o62/10\n0o5%10\n0o0/10\n\n\n0o0.56\n0o12\n56%12\n48/12\n4%12\n0/12\n\n0.5/0.12\n\n5*8**-1\n6*8**-2\n\n7*8**1\n2*8**0\n\n0o56 to 0x...\n\n0.625/16\n= 0.0390625\n\nimport math\ndef runMult(fraction, baseB=16):\n output = []\n return mult(fraction, baseB, output)\n\ndef mult(fraction, baseB, output):\n '''\n only base-16 now!\n '''\n prod = fraction * float(baseB)\n print(\"prod: \",prod)\n int_prod = int(math.floor(prod))\n if int_prod >= 1:\n output.append(int_prod)\n radix_right = prod - int_prod\n print(\"radix_right: \", radix_right)\n if radix_right == 0.0:\n print(\"output: \", output)\n return output\n else:\n mult(radix_right, baseB, output)\n\n(mult(0.5))\n(mult(0.56))\nrunMult(0.71875, 8)\nrunMult(0.1415926535)\n\np = math.pi-3\np\n(((((((((((((((((((((((((p*16)-2)*16)-4)*16)-3)*16)-15)*16)-6)*16)-10)*16)-8)*16)-8)*16)-8)*16)-5)*16)-10)*16)-3)*16)\n\n0.56\nd = 5*8**-1 + 6*8**-2\n((((d*16)-11)*16)-8)\n11 = b\n8 = 8\n\n0o0.56 == 0x0.b8\n\n#b16 to b26\n0x0.243f6a8885a3\n0.3HIGBEBOHK\n\n\ndef toDec(digits, baseA):\n '''\n takes fractional part as list\n Example:\n 0.56 = [5,6]\n toDec(56, 8)\n out: 0.71875\n '''\n # digit_list = list(str(digits))\n digit_list = digits\n dec_list = []\n # print(digit_list)\n for i, d in enumerate(digit_list):\n dec_d = float(d)*baseA**-(i+1)\n dec_list.append(dec_d)\n # print(dec_list)\n output = 0.0\n for i in dec_list:\n output += i\n return output\n\ntoDec([5,6], 8)\ntoDec([2,4,3,15,6,10,8,8,8,5,10,3], 16)\n\ndef toBase(input, baseA, baseB):\n dec = toDec(input, baseA)\n\n\n\n0.3212 *3\n0.9636\n1.4040\n0.404 *3\n2.020\n0.02 *3\n0.1\n0.1 *3\n0.3 *3\n1.3\n1.3 *3\n\n120011111...\n\n# CORRECT !!! ################################################################\n0.56 #base-8 multiplication, 10b10 = 12b8 \n0.56*12\n 50 60\n5.6\n0.75\n6.2\n7.14\n0.14 * 12\n 0.04 *10 / 8\n 40 -> 50\n 0.50\n 0.1 *10 / 8\n 10 -> 12\n 1.2\n 1.2 + 0.5\n 1.7\n0.7 * 12\n7.0\n10.6\n0.6 *12\n7.4\n0.4*12\n5.0\n\n0.71875\n\n\n\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
class ToolBusiness(object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ToolBusiness(object):
@classmethod
def get_tool_ip(cls):
ip = request.args.get('ip')
url = 'http://api.map.baidu.com/location/ip'
params = {'ip': ip, 'ak': 'kqCYLKt8Uz9VnvHBXA7uOI51FIrei0OM'}
ret = requests.get(url=url, params=params)
ret = json.loads(ret.content)
if ret and 'status' in ret and ret['status'
] == 0 and 'content' in ret and 'address' in ret:
return ret['status'], ret['content'], ret['address'], 'ok'
return 101, '', '', '获取失败'
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ToolBusiness(object):
@classmethod
def get_tool_ip(cls):
ip = request.args.get('ip')
url = 'http://api.map.baidu.com/location/ip'
params = {'ip': ip, 'ak': 'kqCYLKt8Uz9VnvHBXA7uOI51FIrei0OM'}
ret = requests.get(url=url, params=params)
ret = json.loads(ret.content)
if ret and 'status' in ret and ret['status'
] == 0 and 'content' in ret and 'address' in ret:
return ret['status'], ret['content'], ret['address'], 'ok'
return 101, '', '', '获取失败'
@classmethod
def apk_analysis(cls, apk_download_url, type=1):
try:
target_path = '/tmp/packages/'
if not os.path.exists(target_path):
os.mkdir(target_path)
date_time_now = datetime.now().strftime('%Y%m%d-%H.%M.%S')
target_name = '{}.apk'.format(date_time_now)
download_apk_name = os.path.join(target_path, target_name)
current_app.logger.info('开始从 {} 下载到 {}'.format(apk_download_url,
download_apk_name))
response = requests.get(url=apk_download_url, verify=False)
with open(download_apk_name, 'wb') as f:
f.write(response.content)
time.sleep(0.5)
if not os.path.exists(download_apk_name):
current_app.logger.error('{} 下载失败!'.format(apk_download_url))
return 102, '下载失败'
current_app.logger.info('下载成功,保存地址 {}'.format(download_apk_name))
current_app.logger.info('开始分析')
package_info_re = re.compile(
"package: name='(.*)' versionCode='(.*)' versionName='(.*?)'.*"
, re.I)
label_icon_re = re.compile("application: label='(.+)'.*icon='(.+)'"
, re.I)
launchable_activity_re = re.compile(
"launchable-activity: name='(.+)'.*label.*", re.I)
apk_info = {}
cmd = '/usr/local/bin/aapt dump badging {}'.format(
download_apk_name)
command_process = subprocess.Popen(cmd, shell=True, stdout=
subprocess.PIPE, stderr=subprocess.STDOUT)
infos = command_process.stdout.readlines()
for info in infos:
info = info.decode('utf-8')
if info.startswith('package:'):
temp = package_info_re.search(info)
apk_info['package_name'] = temp.group(1)
apk_info['version_code'] = temp.group(2) or 0
apk_info['version_name'] = temp.group(3)
elif info.startswith('application:'):
temp = label_icon_re.search(info)
apk_info['label'] = temp.group(1)
apk_info['icon'] = temp.group(2)
elif info.startswith('launchable-activity:'):
temp = launchable_activity_re.search(info)
apk_info['default_activity'] = temp.group(1)
try:
size = round(os.path.getsize(download_apk_name) / float(
1024 * 1024), 2)
apk_info['size'] = str(size)
zip = zipfile.ZipFile(download_apk_name)
icon_binary = zip.read(apk_info['icon'])
time_now = datetime.now().strftime('%Y%m%d.%H%M%S')
picture = f'monkey-{time_now}.png'
dir_path = f'{TCLOUD_FILE_TEMP_PATH}/monkey'
if not os.path.exists(TCLOUD_FILE_TEMP_PATH):
os.mkdir(TCLOUD_FILE_TEMP_PATH)
if not os.path.exists(dir_path):
os.mkdir(dir_path)
with open(f'{dir_path}/{picture}', 'wb') as f:
f.write(icon_binary)
apk_info['icon'] = oss_upload_monkey_package_picture(dir_path,
picture)
except Exception as e:
current_app.logger.warning(e)
current_app.logger.warning(traceback.format_exc())
current_app.logger.info(apk_info)
if type == 1:
pass
elif type == 2:
pass
return apk_info
except Exception as e:
current_app.logger.error(e)
current_app.logger.error(traceback.format_exc())
return {}
<|reserved_special_token_1|>
import base64
import json
import os
import re
import subprocess
import time
import traceback
import zipfile
from datetime import datetime
import requests
from flask import request, current_app
from library.oss import oss_upload_monkey_package_picture
from public_config import TCLOUD_FILE_TEMP_PATH
class ToolBusiness(object):
@classmethod
def get_tool_ip(cls):
ip = request.args.get('ip')
url = 'http://api.map.baidu.com/location/ip'
params = {'ip': ip, 'ak': 'kqCYLKt8Uz9VnvHBXA7uOI51FIrei0OM'}
ret = requests.get(url=url, params=params)
ret = json.loads(ret.content)
if ret and 'status' in ret and ret['status'
] == 0 and 'content' in ret and 'address' in ret:
return ret['status'], ret['content'], ret['address'], 'ok'
return 101, '', '', '获取失败'
@classmethod
def apk_analysis(cls, apk_download_url, type=1):
try:
target_path = '/tmp/packages/'
if not os.path.exists(target_path):
os.mkdir(target_path)
date_time_now = datetime.now().strftime('%Y%m%d-%H.%M.%S')
target_name = '{}.apk'.format(date_time_now)
download_apk_name = os.path.join(target_path, target_name)
current_app.logger.info('开始从 {} 下载到 {}'.format(apk_download_url,
download_apk_name))
response = requests.get(url=apk_download_url, verify=False)
with open(download_apk_name, 'wb') as f:
f.write(response.content)
time.sleep(0.5)
if not os.path.exists(download_apk_name):
current_app.logger.error('{} 下载失败!'.format(apk_download_url))
return 102, '下载失败'
current_app.logger.info('下载成功,保存地址 {}'.format(download_apk_name))
current_app.logger.info('开始分析')
package_info_re = re.compile(
"package: name='(.*)' versionCode='(.*)' versionName='(.*?)'.*"
, re.I)
label_icon_re = re.compile("application: label='(.+)'.*icon='(.+)'"
, re.I)
launchable_activity_re = re.compile(
"launchable-activity: name='(.+)'.*label.*", re.I)
apk_info = {}
cmd = '/usr/local/bin/aapt dump badging {}'.format(
download_apk_name)
command_process = subprocess.Popen(cmd, shell=True, stdout=
subprocess.PIPE, stderr=subprocess.STDOUT)
infos = command_process.stdout.readlines()
for info in infos:
info = info.decode('utf-8')
if info.startswith('package:'):
temp = package_info_re.search(info)
apk_info['package_name'] = temp.group(1)
apk_info['version_code'] = temp.group(2) or 0
apk_info['version_name'] = temp.group(3)
elif info.startswith('application:'):
temp = label_icon_re.search(info)
apk_info['label'] = temp.group(1)
apk_info['icon'] = temp.group(2)
elif info.startswith('launchable-activity:'):
temp = launchable_activity_re.search(info)
apk_info['default_activity'] = temp.group(1)
try:
size = round(os.path.getsize(download_apk_name) / float(
1024 * 1024), 2)
apk_info['size'] = str(size)
zip = zipfile.ZipFile(download_apk_name)
icon_binary = zip.read(apk_info['icon'])
time_now = datetime.now().strftime('%Y%m%d.%H%M%S')
picture = f'monkey-{time_now}.png'
dir_path = f'{TCLOUD_FILE_TEMP_PATH}/monkey'
if not os.path.exists(TCLOUD_FILE_TEMP_PATH):
os.mkdir(TCLOUD_FILE_TEMP_PATH)
if not os.path.exists(dir_path):
os.mkdir(dir_path)
with open(f'{dir_path}/{picture}', 'wb') as f:
f.write(icon_binary)
apk_info['icon'] = oss_upload_monkey_package_picture(dir_path,
picture)
except Exception as e:
current_app.logger.warning(e)
current_app.logger.warning(traceback.format_exc())
current_app.logger.info(apk_info)
if type == 1:
pass
elif type == 2:
pass
return apk_info
except Exception as e:
current_app.logger.error(e)
current_app.logger.error(traceback.format_exc())
return {}
<|reserved_special_token_1|>
#!/usr/bin/python
# -*- coding: utf-8 -*-
import base64
import json
import os
import re
import subprocess
import time
import traceback
import zipfile
from datetime import datetime
import requests
from flask import request, current_app
from library.oss import oss_upload_monkey_package_picture
from public_config import TCLOUD_FILE_TEMP_PATH
class ToolBusiness(object):
@classmethod
def get_tool_ip(cls):
ip = request.args.get('ip')
url = 'http://api.map.baidu.com/location/ip'
params = {"ip": ip, "ak": 'kqCYLKt8Uz9VnvHBXA7uOI51FIrei0OM'}
ret = requests.get(url=url, params=params)
ret = json.loads(ret.content)
if ret and 'status' in ret and ret['status'] == 0 and 'content' in ret and 'address' in ret:
return ret['status'], ret['content'], ret['address'], 'ok'
return 101, '', '', '获取失败'
@classmethod
def apk_analysis(cls, apk_download_url, type=1):
try:
# type 1 : not save , 2: save to db
target_path = "/tmp/packages/"
if not os.path.exists(target_path):
os.mkdir(target_path)
date_time_now = datetime.now().strftime('%Y%m%d-%H.%M.%S')
target_name = '{}.apk'.format(date_time_now)
download_apk_name = os.path.join(target_path, target_name)
current_app.logger.info('开始从 {} 下载到 {}'.format(apk_download_url, download_apk_name))
response = requests.get(url=apk_download_url, verify=False)
with open(download_apk_name, 'wb') as f:
f.write(response.content)
time.sleep(0.5)
            # download failed
if not os.path.exists(download_apk_name):
current_app.logger.error('{} 下载失败!'.format(apk_download_url))
return 102, "下载失败"
current_app.logger.info('下载成功,保存地址 {}'.format(download_apk_name))
current_app.logger.info('开始分析')
package_info_re = re.compile(r"package: name='(.*)' versionCode='(.*)' versionName='(.*?)'.*", re.I)
label_icon_re = re.compile(r"application: label='(.+)'.*icon='(.+)'", re.I)
launchable_activity_re = re.compile(r"launchable-activity: name='(.+)'.*label.*", re.I)
apk_info = {}
cmd = '/usr/local/bin/aapt dump badging {}'.format(download_apk_name)
command_process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
infos = command_process.stdout.readlines()
for info in infos:
info = info.decode('utf-8')
if info.startswith('package:'):
temp = package_info_re.search(info)
apk_info['package_name'] = temp.group(1)
apk_info['version_code'] = temp.group(2) or 0
apk_info['version_name'] = temp.group(3)
elif info.startswith('application:'):
temp = label_icon_re.search(info)
apk_info['label'] = temp.group(1)
apk_info['icon'] = temp.group(2)
elif info.startswith('launchable-activity:'):
temp = launchable_activity_re.search(info)
apk_info['default_activity'] = temp.group(1)
try:
size = round(os.path.getsize(download_apk_name) / float(1024 * 1024), 2)
apk_info['size'] = str(size)
zip = zipfile.ZipFile(download_apk_name)
icon_binary = zip.read(apk_info['icon'])
time_now = datetime.now().strftime('%Y%m%d.%H%M%S')
picture = f'monkey-{time_now}.png'
dir_path = f'{TCLOUD_FILE_TEMP_PATH}/monkey'
if not os.path.exists(TCLOUD_FILE_TEMP_PATH):
os.mkdir(TCLOUD_FILE_TEMP_PATH)
if not os.path.exists(dir_path):
os.mkdir(dir_path)
with open(f'{dir_path}/{picture}', 'wb') as f:
f.write(icon_binary)
apk_info['icon'] = oss_upload_monkey_package_picture(dir_path, picture)
except Exception as e:
current_app.logger.warning(e)
current_app.logger.warning(traceback.format_exc())
current_app.logger.info(apk_info)
if type == 1:
pass
elif type == 2:
pass
return apk_info
except Exception as e:
current_app.logger.error(e)
current_app.logger.error(traceback.format_exc())
return {}
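A small offline check of the regex parsing used above (my addition): the sample line imitates `aapt dump badging` output and is not captured from a real device.

import re

package_info_re = re.compile(r"package: name='(.*)' versionCode='(.*)' versionName='(.*?)'.*", re.I)
sample = "package: name='com.example.app' versionCode='42' versionName='1.2.3' platformBuildVersionName=''"
m = package_info_re.search(sample)
print(m.group(1), m.group(2), m.group(3))   # com.example.app 42 1.2.3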
|
flexible
|
{
"blob_id": "bf45349a9fdfcef7392c477e089c5e3916cb4c8e",
"index": 8502,
"step-1": "<mask token>\n\n\nclass ToolBusiness(object):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass ToolBusiness(object):\n\n @classmethod\n def get_tool_ip(cls):\n ip = request.args.get('ip')\n url = 'http://api.map.baidu.com/location/ip'\n params = {'ip': ip, 'ak': 'kqCYLKt8Uz9VnvHBXA7uOI51FIrei0OM'}\n ret = requests.get(url=url, params=params)\n ret = json.loads(ret.content)\n if ret and 'status' in ret and ret['status'\n ] == 0 and 'content' in ret and 'address' in ret:\n return ret['status'], ret['content'], ret['address'], 'ok'\n return 101, '', '', '获取失败'\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass ToolBusiness(object):\n\n @classmethod\n def get_tool_ip(cls):\n ip = request.args.get('ip')\n url = 'http://api.map.baidu.com/location/ip'\n params = {'ip': ip, 'ak': 'kqCYLKt8Uz9VnvHBXA7uOI51FIrei0OM'}\n ret = requests.get(url=url, params=params)\n ret = json.loads(ret.content)\n if ret and 'status' in ret and ret['status'\n ] == 0 and 'content' in ret and 'address' in ret:\n return ret['status'], ret['content'], ret['address'], 'ok'\n return 101, '', '', '获取失败'\n\n @classmethod\n def apk_analysis(cls, apk_download_url, type=1):\n try:\n target_path = '/tmp/packages/'\n if not os.path.exists(target_path):\n os.mkdir(target_path)\n date_time_now = datetime.now().strftime('%Y%m%d-%H.%M.%S')\n target_name = '{}.apk'.format(date_time_now)\n download_apk_name = os.path.join(target_path, target_name)\n current_app.logger.info('开始从 {} 下载到 {}'.format(apk_download_url,\n download_apk_name))\n response = requests.get(url=apk_download_url, verify=False)\n with open(download_apk_name, 'wb') as f:\n f.write(response.content)\n time.sleep(0.5)\n if not os.path.exists(download_apk_name):\n current_app.logger.error('{} 下载失败!'.format(apk_download_url))\n return 102, '下载失败'\n current_app.logger.info('下载成功,保存地址 {}'.format(download_apk_name))\n current_app.logger.info('开始分析')\n package_info_re = re.compile(\n \"package: name='(.*)' versionCode='(.*)' versionName='(.*?)'.*\"\n , re.I)\n label_icon_re = re.compile(\"application: label='(.+)'.*icon='(.+)'\"\n , re.I)\n launchable_activity_re = re.compile(\n \"launchable-activity: name='(.+)'.*label.*\", re.I)\n apk_info = {}\n cmd = '/usr/local/bin/aapt dump badging {}'.format(\n download_apk_name)\n command_process = subprocess.Popen(cmd, shell=True, stdout=\n subprocess.PIPE, stderr=subprocess.STDOUT)\n infos = command_process.stdout.readlines()\n for info in infos:\n info = info.decode('utf-8')\n if info.startswith('package:'):\n temp = package_info_re.search(info)\n apk_info['package_name'] = temp.group(1)\n apk_info['version_code'] = temp.group(2) or 0\n apk_info['version_name'] = temp.group(3)\n elif info.startswith('application:'):\n temp = label_icon_re.search(info)\n apk_info['label'] = temp.group(1)\n apk_info['icon'] = temp.group(2)\n elif info.startswith('launchable-activity:'):\n temp = launchable_activity_re.search(info)\n apk_info['default_activity'] = temp.group(1)\n try:\n size = round(os.path.getsize(download_apk_name) / float(\n 1024 * 1024), 2)\n apk_info['size'] = str(size)\n zip = zipfile.ZipFile(download_apk_name)\n icon_binary = zip.read(apk_info['icon'])\n time_now = datetime.now().strftime('%Y%m%d.%H%M%S')\n picture = f'monkey-{time_now}.png'\n dir_path = f'{TCLOUD_FILE_TEMP_PATH}/monkey'\n if not os.path.exists(TCLOUD_FILE_TEMP_PATH):\n os.mkdir(TCLOUD_FILE_TEMP_PATH)\n if not os.path.exists(dir_path):\n os.mkdir(dir_path)\n with open(f'{dir_path}/{picture}', 'wb') as f:\n f.write(icon_binary)\n apk_info['icon'] = oss_upload_monkey_package_picture(dir_path,\n picture)\n except Exception as e:\n current_app.logger.warning(e)\n current_app.logger.warning(traceback.format_exc())\n current_app.logger.info(apk_info)\n if type == 1:\n pass\n elif type == 2:\n pass\n return apk_info\n except Exception as e:\n current_app.logger.error(e)\n current_app.logger.error(traceback.format_exc())\n return {}\n",
"step-4": "import base64\nimport json\nimport os\nimport re\nimport subprocess\nimport time\nimport traceback\nimport zipfile\nfrom datetime import datetime\nimport requests\nfrom flask import request, current_app\nfrom library.oss import oss_upload_monkey_package_picture\nfrom public_config import TCLOUD_FILE_TEMP_PATH\n\n\nclass ToolBusiness(object):\n\n @classmethod\n def get_tool_ip(cls):\n ip = request.args.get('ip')\n url = 'http://api.map.baidu.com/location/ip'\n params = {'ip': ip, 'ak': 'kqCYLKt8Uz9VnvHBXA7uOI51FIrei0OM'}\n ret = requests.get(url=url, params=params)\n ret = json.loads(ret.content)\n if ret and 'status' in ret and ret['status'\n ] == 0 and 'content' in ret and 'address' in ret:\n return ret['status'], ret['content'], ret['address'], 'ok'\n return 101, '', '', '获取失败'\n\n @classmethod\n def apk_analysis(cls, apk_download_url, type=1):\n try:\n target_path = '/tmp/packages/'\n if not os.path.exists(target_path):\n os.mkdir(target_path)\n date_time_now = datetime.now().strftime('%Y%m%d-%H.%M.%S')\n target_name = '{}.apk'.format(date_time_now)\n download_apk_name = os.path.join(target_path, target_name)\n current_app.logger.info('开始从 {} 下载到 {}'.format(apk_download_url,\n download_apk_name))\n response = requests.get(url=apk_download_url, verify=False)\n with open(download_apk_name, 'wb') as f:\n f.write(response.content)\n time.sleep(0.5)\n if not os.path.exists(download_apk_name):\n current_app.logger.error('{} 下载失败!'.format(apk_download_url))\n return 102, '下载失败'\n current_app.logger.info('下载成功,保存地址 {}'.format(download_apk_name))\n current_app.logger.info('开始分析')\n package_info_re = re.compile(\n \"package: name='(.*)' versionCode='(.*)' versionName='(.*?)'.*\"\n , re.I)\n label_icon_re = re.compile(\"application: label='(.+)'.*icon='(.+)'\"\n , re.I)\n launchable_activity_re = re.compile(\n \"launchable-activity: name='(.+)'.*label.*\", re.I)\n apk_info = {}\n cmd = '/usr/local/bin/aapt dump badging {}'.format(\n download_apk_name)\n command_process = subprocess.Popen(cmd, shell=True, stdout=\n subprocess.PIPE, stderr=subprocess.STDOUT)\n infos = command_process.stdout.readlines()\n for info in infos:\n info = info.decode('utf-8')\n if info.startswith('package:'):\n temp = package_info_re.search(info)\n apk_info['package_name'] = temp.group(1)\n apk_info['version_code'] = temp.group(2) or 0\n apk_info['version_name'] = temp.group(3)\n elif info.startswith('application:'):\n temp = label_icon_re.search(info)\n apk_info['label'] = temp.group(1)\n apk_info['icon'] = temp.group(2)\n elif info.startswith('launchable-activity:'):\n temp = launchable_activity_re.search(info)\n apk_info['default_activity'] = temp.group(1)\n try:\n size = round(os.path.getsize(download_apk_name) / float(\n 1024 * 1024), 2)\n apk_info['size'] = str(size)\n zip = zipfile.ZipFile(download_apk_name)\n icon_binary = zip.read(apk_info['icon'])\n time_now = datetime.now().strftime('%Y%m%d.%H%M%S')\n picture = f'monkey-{time_now}.png'\n dir_path = f'{TCLOUD_FILE_TEMP_PATH}/monkey'\n if not os.path.exists(TCLOUD_FILE_TEMP_PATH):\n os.mkdir(TCLOUD_FILE_TEMP_PATH)\n if not os.path.exists(dir_path):\n os.mkdir(dir_path)\n with open(f'{dir_path}/{picture}', 'wb') as f:\n f.write(icon_binary)\n apk_info['icon'] = oss_upload_monkey_package_picture(dir_path,\n picture)\n except Exception as e:\n current_app.logger.warning(e)\n current_app.logger.warning(traceback.format_exc())\n current_app.logger.info(apk_info)\n if type == 1:\n pass\n elif type == 2:\n pass\n return apk_info\n except Exception as e:\n 
current_app.logger.error(e)\n current_app.logger.error(traceback.format_exc())\n return {}\n",
"step-5": "#!/usr/bin/python\n# -*- coding: utf-8 -*-\nimport base64\nimport json\nimport os\nimport re\nimport subprocess\nimport time\nimport traceback\nimport zipfile\nfrom datetime import datetime\n\nimport requests\nfrom flask import request, current_app\n\nfrom library.oss import oss_upload_monkey_package_picture\nfrom public_config import TCLOUD_FILE_TEMP_PATH\n\n\nclass ToolBusiness(object):\n\n @classmethod\n def get_tool_ip(cls):\n ip = request.args.get('ip')\n\n url = 'http://api.map.baidu.com/location/ip'\n params = {\"ip\": ip, \"ak\": 'kqCYLKt8Uz9VnvHBXA7uOI51FIrei0OM'}\n ret = requests.get(url=url, params=params)\n ret = json.loads(ret.content)\n\n if ret and 'status' in ret and ret['status'] == 0 and 'content' in ret and 'address' in ret:\n return ret['status'], ret['content'], ret['address'], 'ok'\n\n return 101, '', '', '获取失败'\n\n @classmethod\n def apk_analysis(cls, apk_download_url, type=1):\n try:\n # type 1 : not save , 2: save to db\n target_path = \"/tmp/packages/\"\n if not os.path.exists(target_path):\n os.mkdir(target_path)\n\n date_time_now = datetime.now().strftime('%Y%m%d-%H.%M.%S')\n target_name = '{}.apk'.format(date_time_now)\n\n download_apk_name = os.path.join(target_path, target_name)\n\n current_app.logger.info('开始从 {} 下载到 {}'.format(apk_download_url, download_apk_name))\n\n response = requests.get(url=apk_download_url, verify=False)\n\n with open(download_apk_name, 'wb') as f:\n f.write(response.content)\n\n time.sleep(0.5)\n # 下载失败\n if not os.path.exists(download_apk_name):\n current_app.logger.error('{} 下载失败!'.format(apk_download_url))\n return 102, \"下载失败\"\n\n current_app.logger.info('下载成功,保存地址 {}'.format(download_apk_name))\n current_app.logger.info('开始分析')\n\n package_info_re = re.compile(r\"package: name='(.*)' versionCode='(.*)' versionName='(.*?)'.*\", re.I)\n label_icon_re = re.compile(r\"application: label='(.+)'.*icon='(.+)'\", re.I)\n launchable_activity_re = re.compile(r\"launchable-activity: name='(.+)'.*label.*\", re.I)\n\n apk_info = {}\n\n cmd = '/usr/local/bin/aapt dump badging {}'.format(download_apk_name)\n\n command_process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)\n\n infos = command_process.stdout.readlines()\n\n for info in infos:\n info = info.decode('utf-8')\n if info.startswith('package:'):\n temp = package_info_re.search(info)\n apk_info['package_name'] = temp.group(1)\n apk_info['version_code'] = temp.group(2) or 0\n apk_info['version_name'] = temp.group(3)\n elif info.startswith('application:'):\n temp = label_icon_re.search(info)\n apk_info['label'] = temp.group(1)\n apk_info['icon'] = temp.group(2)\n elif info.startswith('launchable-activity:'):\n temp = launchable_activity_re.search(info)\n apk_info['default_activity'] = temp.group(1)\n\n try:\n size = round(os.path.getsize(download_apk_name) / float(1024 * 1024), 2)\n apk_info['size'] = str(size)\n zip = zipfile.ZipFile(download_apk_name)\n icon_binary = zip.read(apk_info['icon'])\n time_now = datetime.now().strftime('%Y%m%d.%H%M%S')\n picture = f'monkey-{time_now}.png'\n dir_path = f'{TCLOUD_FILE_TEMP_PATH}/monkey'\n\n if not os.path.exists(TCLOUD_FILE_TEMP_PATH):\n os.mkdir(TCLOUD_FILE_TEMP_PATH)\n\n if not os.path.exists(dir_path):\n os.mkdir(dir_path)\n with open(f'{dir_path}/{picture}', 'wb') as f:\n f.write(icon_binary)\n\n apk_info['icon'] = oss_upload_monkey_package_picture(dir_path, picture)\n except Exception as e:\n current_app.logger.warning(e)\n current_app.logger.warning(traceback.format_exc())\n\n 
current_app.logger.info(apk_info)\n\n if type == 1:\n pass\n elif type == 2:\n pass\n\n return apk_info\n except Exception as e:\n current_app.logger.error(e)\n current_app.logger.error(traceback.format_exc())\n return {}\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
vocales = "aeiou"
resultado = []
frase = input("Por favor ingrese la frase que desea verificar").lower()
print(frase)
for vocal in vocales:
conteo_vocales = frase.count(vocal)
    mensaje = (f"En la frase hay {conteo_vocales} veces, la vocal {vocal}")
resultado.append(mensaje)
for elemento in resultado:
print(elemento)
|
normal
|
{
"blob_id": "f0a03f9a6dc78d01455913f7db3ab1948b19ea63",
"index": 6250,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(frase)\nfor vocal in vocales:\n conteo_vocales = frase.count(vocal)\n mensaje = f'En la frase hay {conteo_vocales} veces, la vocal{vocal}'\n resultado.append(mensaje)\nfor elemento in resultado:\n print(elemento)\n",
"step-3": "vocales = 'aeiou'\nresultado = []\nfrase = input('Por favor ingrese la frase que desea verificar').lower()\nprint(frase)\nfor vocal in vocales:\n conteo_vocales = frase.count(vocal)\n mensaje = f'En la frase hay {conteo_vocales} veces, la vocal{vocal}'\n resultado.append(mensaje)\nfor elemento in resultado:\n print(elemento)\n",
"step-4": "vocales = \"aeiou\"\nresultado = []\n\nfrase = input(\"Por favor ingrese la frase que desea verificar\").lower()\nprint(frase)\n\nfor vocal in vocales:\n conteo_vocales = frase.count(vocal)\n mensaje = (f\"En la frase hay {conteo_vocales} veces, la vocal{vocal}\")\n resultado.append(mensaje)\n\nfor elemento in resultado:\n print(elemento)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from __future__ import annotations
from functools import cache
class Solution:
def countArrangement(self, n: int) -> int:
cache = {}
def helper(perm):
digits = len(perm)
if digits == 1:
return 1
if perm in cache:
return cache[perm]
cnt = 0
for i in range(digits):
if perm[i] % digits == 0 or digits % perm[i] == 0:
cnt += helper(perm[:i] + perm[i+1:])
cache[perm] = cnt
return cnt
return helper(tuple(range(1, n+1)))
class Solution:
def countArrangement(self, n: int) -> int:
# total number of bitset states possible
bitset_total = 2**n
dp = [[0 for _ in range(bitset_total)]
for _ in range(n+1)]
# all other valid states lead to this base case so mark this as 1
dp[0][0] = 1
# iterate over all positions
for i in range(1, n+1):
# iterate over all subsets
for bm in range(bitset_total):
# iterate over all numbers
for num in range(n):
                    # if num is in the current subset and satisfies the divisibility condition
                    # & (bitwise AND: a bit is 1 only when both corresponding bits are 1)
                    # 1 << x (shift 1 left by x positions, filling with zeros on the right)
                    # ^ (XOR: a bit is 1 when exactly one of the corresponding bits is 1)
if ((bm & (1 << num)) and
(((num+1) % i == 0) or
(i % (num+1) == 0))):
dp[i][bm] += dp[i-1][bm ^ (1 << num)]
return dp[-1][-1]
# bm is a binary mask of visited numbers.
# i is the current place we want to fill.
# The idea is to start from the end and fill places in the opposite direction,
# because big numbers potentially have fewer candidates.
# How dfs(bm, pl) works:
# If we reach place 0 and the process was not interrupted so far,
# it means that we found a beautiful arrangement.
# For each number 1, 2, ..., n we try to put this number on place pl,
# and we need to check two conditions: first, that this place is still empty
# (using the bitmask), and secondly that one of the two properties for a
# beautiful arrangement holds. In this case we add dfs(bm^1<<i, pl - 1)
# to the final answer.
# Finally, we run dfs(0, n): from the last place and with an empty bit-mask.
class Solution:
def countArrangement(self, n: int) -> int:
@cache
def dfs(bm, i):
if i == 0:
return 1
cnt = 0
for num in range(n):
if not bm & 1 << num\
and ((num+1) % i == 0 or i % (num+1) == 0):
cnt += dfs(bm ^ 1 << num, i-1)
return cnt
return dfs(0, n)
# nums is the set of still-available numbers.
# Note that i goes downwards, from n to 1, because position i = 1
# can hold any number, so I don't even have to check whether the last
# remaining number fits there. Also, position i = 2 happily holds
# every second number and i = 3 happily holds every third number,
# so filling the lowest positions last has a relatively high chance of success.
class Solution:
def countArrangement(self, n: int) -> int:
def count(i, nums):
if i == 1:
return 1
return sum(count(i-1, nums-{num})
for num in nums
if num % i == 0 or i % num == 0)
return count(n, set(range(1, n+1)))
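A quick sanity check for the solvers above (my addition); because the classes share a name, `Solution` here resolves to the last, set-based version, and the expected counts are the known values for this problem:

if __name__ == "__main__":
    s = Solution()
    print(s.countArrangement(1), s.countArrangement(2), s.countArrangement(4))   # 1 2 8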
|
normal
|
{
"blob_id": "e6acc7b022001d8419095ad6364a6ae9504ec7aa",
"index": 508,
"step-1": "<mask token>\n\n\nclass Solution:\n <mask token>\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n\n @cache\n def dfs(bm, i):\n if i == 0:\n return 1\n cnt = 0\n for num in range(n):\n if not bm & 1 << num and ((num + 1) % i == 0 or i % (num + \n 1) == 0):\n cnt += dfs(bm ^ 1 << num, i - 1)\n return cnt\n return dfs(0, n)\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n\n def count(i, nums):\n if i == 1:\n return 1\n return sum(count(i - 1, nums - {num}) for num in nums if num %\n i == 0 or i % num == 0)\n return count(n, set(range(1, n + 1)))\n",
"step-2": "<mask token>\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n bitset_total = 2 ** n\n dp = [[(0) for _ in range(bitset_total)] for _ in range(n + 1)]\n dp[0][0] = 1\n for i in range(1, n + 1):\n for bm in range(bitset_total):\n for num in range(n):\n if bm & 1 << num and ((num + 1) % i == 0 or i % (num + \n 1) == 0):\n dp[i][bm] += dp[i - 1][bm ^ 1 << num]\n return dp[-1][-1]\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n\n @cache\n def dfs(bm, i):\n if i == 0:\n return 1\n cnt = 0\n for num in range(n):\n if not bm & 1 << num and ((num + 1) % i == 0 or i % (num + \n 1) == 0):\n cnt += dfs(bm ^ 1 << num, i - 1)\n return cnt\n return dfs(0, n)\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n\n def count(i, nums):\n if i == 1:\n return 1\n return sum(count(i - 1, nums - {num}) for num in nums if num %\n i == 0 or i % num == 0)\n return count(n, set(range(1, n + 1)))\n",
"step-3": "<mask token>\n\n\nclass Solution:\n <mask token>\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n bitset_total = 2 ** n\n dp = [[(0) for _ in range(bitset_total)] for _ in range(n + 1)]\n dp[0][0] = 1\n for i in range(1, n + 1):\n for bm in range(bitset_total):\n for num in range(n):\n if bm & 1 << num and ((num + 1) % i == 0 or i % (num + \n 1) == 0):\n dp[i][bm] += dp[i - 1][bm ^ 1 << num]\n return dp[-1][-1]\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n\n @cache\n def dfs(bm, i):\n if i == 0:\n return 1\n cnt = 0\n for num in range(n):\n if not bm & 1 << num and ((num + 1) % i == 0 or i % (num + \n 1) == 0):\n cnt += dfs(bm ^ 1 << num, i - 1)\n return cnt\n return dfs(0, n)\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n\n def count(i, nums):\n if i == 1:\n return 1\n return sum(count(i - 1, nums - {num}) for num in nums if num %\n i == 0 or i % num == 0)\n return count(n, set(range(1, n + 1)))\n",
"step-4": "<mask token>\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n cache = {}\n\n def helper(perm):\n digits = len(perm)\n if digits == 1:\n return 1\n if perm in cache:\n return cache[perm]\n cnt = 0\n for i in range(digits):\n if perm[i] % digits == 0 or digits % perm[i] == 0:\n cnt += helper(perm[:i] + perm[i + 1:])\n cache[perm] = cnt\n return cnt\n return helper(tuple(range(1, n + 1)))\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n bitset_total = 2 ** n\n dp = [[(0) for _ in range(bitset_total)] for _ in range(n + 1)]\n dp[0][0] = 1\n for i in range(1, n + 1):\n for bm in range(bitset_total):\n for num in range(n):\n if bm & 1 << num and ((num + 1) % i == 0 or i % (num + \n 1) == 0):\n dp[i][bm] += dp[i - 1][bm ^ 1 << num]\n return dp[-1][-1]\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n\n @cache\n def dfs(bm, i):\n if i == 0:\n return 1\n cnt = 0\n for num in range(n):\n if not bm & 1 << num and ((num + 1) % i == 0 or i % (num + \n 1) == 0):\n cnt += dfs(bm ^ 1 << num, i - 1)\n return cnt\n return dfs(0, n)\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n\n def count(i, nums):\n if i == 1:\n return 1\n return sum(count(i - 1, nums - {num}) for num in nums if num %\n i == 0 or i % num == 0)\n return count(n, set(range(1, n + 1)))\n",
"step-5": "from __future__ import annotations\nfrom functools import cache\n\n\nclass Solution:\n def countArrangement(self, n: int) -> int:\n cache = {}\n\n def helper(perm):\n digits = len(perm)\n if digits == 1:\n return 1\n if perm in cache:\n return cache[perm]\n cnt = 0\n for i in range(digits):\n if perm[i] % digits == 0 or digits % perm[i] == 0:\n cnt += helper(perm[:i] + perm[i+1:])\n cache[perm] = cnt\n return cnt\n\n return helper(tuple(range(1, n+1)))\n\n\nclass Solution:\n def countArrangement(self, n: int) -> int:\n # total number of bitset states possible\n bitset_total = 2**n\n dp = [[0 for _ in range(bitset_total)]\n for _ in range(n+1)]\n # all other valid states lead to this base case so mark this as 1\n dp[0][0] = 1\n # iterate over all positions\n for i in range(1, n+1):\n # iterate over all subsets\n for bm in range(bitset_total):\n # iterate over all numbers\n for num in range(n):\n # if number is not visited and satisfies condition in question\n # & (各桁が両方とも1なら1になる)\n # 1 << x (1を左にxシフトさせて右をゼロで埋める)\n # ^ (XOR: 各桁の片方が1なら1になる)\n if ((bm & (1 << num)) and\n (((num+1) % i == 0) or\n (i % (num+1) == 0))):\n dp[i][bm] += dp[i-1][bm ^ (1 << num)]\n return dp[-1][-1]\n\n\n# bm is binary mask for visited numbers.\n# i is current place we want to fill. \n# Idea is to start from the end, and fill places in opposite direction,\n# because for big numbers we potentially have less candidates.\n# how dfs(bm, pl) will work:\n# If we reached place 0 and procces was not interrupted so far,\n# it means that we find beautiful arrangement.\n# For each number 1, 2, ..., n we try to put this number on place pl:\n# and we need to check two conditions: first, that this place is still empty,\n# using bitmask and secondly that one of the two properties for beutiful arrangement\n# holds. In this case we add dfs(bm^1<<i, pl - 1) to final answer.\n# Finally, we run dfs(0, n): from the last place and with empty bit-mask.\nclass Solution:\n def countArrangement(self, n: int) -> int:\n @cache\n def dfs(bm, i):\n if i == 0:\n return 1\n\n cnt = 0\n for num in range(n):\n if not bm & 1 << num\\\n and ((num+1) % i == 0 or i % (num+1) == 0):\n cnt += dfs(bm ^ 1 << num, i-1)\n return cnt\n\n return dfs(0, n)\n\n\n# nums is the set of still available numbers.\n# Note that my i goes downwards, from n to 1. Because position i = 1\n# can hold any number, so I don't even have to check whether the last\n# remaining number fits there. Also, position i = 2 happily holds\n# every second number and i = 3 happily holds every third number,\n# so filling the lowest positions last has a relatively high chance of success.\nclass Solution:\n def countArrangement(self, n: int) -> int:\n def count(i, nums):\n if i == 1:\n return 1\n return sum(count(i-1, nums-{num})\n for num in nums\n if num % i == 0 or i % num == 0)\n return count(n, set(range(1, n+1)))\n",
"step-ids": [
5,
6,
7,
8,
10
]
}
|
[
5,
6,
7,
8,
10
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def surfaceAreaCone(maxRadius=20, maxHeight=50, unit='m'):
a = random.randint(1, maxHeight)
b = random.randint(1, maxRadius)
slopingHeight = math.sqrt(a ** 2 + b ** 2)
problem = (
f'Surface area of cone with height = {a}{unit} and radius = {b}{unit} is'
)
ans = int(math.pi * b * slopingHeight + math.pi * b * b)
solution = f'{ans} {unit}^2'
return problem, solution
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def surfaceAreaCone(maxRadius=20, maxHeight=50, unit='m'):
a = random.randint(1, maxHeight)
b = random.randint(1, maxRadius)
slopingHeight = math.sqrt(a ** 2 + b ** 2)
problem = (
f'Surface area of cone with height = {a}{unit} and radius = {b}{unit} is'
)
ans = int(math.pi * b * slopingHeight + math.pi * b * b)
solution = f'{ans} {unit}^2'
return problem, solution
surface_area_cone = Generator('Surface Area of cone', 38,
'Surface area of cone with height = a units and radius = b units is',
'c units^2', surfaceAreaCone)
<|reserved_special_token_1|>
from .__init__ import *
def surfaceAreaCone(maxRadius=20, maxHeight=50, unit='m'):
a = random.randint(1, maxHeight)
b = random.randint(1, maxRadius)
slopingHeight = math.sqrt(a ** 2 + b ** 2)
problem = (
f'Surface area of cone with height = {a}{unit} and radius = {b}{unit} is'
)
ans = int(math.pi * b * slopingHeight + math.pi * b * b)
solution = f'{ans} {unit}^2'
return problem, solution
surface_area_cone = Generator('Surface Area of cone', 38,
'Surface area of cone with height = a units and radius = b units is',
'c units^2', surfaceAreaCone)
<|reserved_special_token_1|>
from .__init__ import *
def surfaceAreaCone(maxRadius=20, maxHeight=50, unit='m'):
a = random.randint(1, maxHeight)
b = random.randint(1, maxRadius)
slopingHeight = math.sqrt(a**2 + b**2)
problem = f"Surface area of cone with height = {a}{unit} and radius = {b}{unit} is"
ans = int(math.pi * b * slopingHeight + math.pi * b * b)
solution = f"{ans} {unit}^2"
return problem, solution
surface_area_cone = Generator(
"Surface Area of cone", 38,
"Surface area of cone with height = a units and radius = b units is",
"c units^2", surfaceAreaCone)
|
flexible
|
{
"blob_id": "3e19ede2112a109a776b607e927e2f0a095ba5cc",
"index": 7677,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef surfaceAreaCone(maxRadius=20, maxHeight=50, unit='m'):\n a = random.randint(1, maxHeight)\n b = random.randint(1, maxRadius)\n slopingHeight = math.sqrt(a ** 2 + b ** 2)\n problem = (\n f'Surface area of cone with height = {a}{unit} and radius = {b}{unit} is'\n )\n ans = int(math.pi * b * slopingHeight + math.pi * b * b)\n solution = f'{ans} {unit}^2'\n return problem, solution\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef surfaceAreaCone(maxRadius=20, maxHeight=50, unit='m'):\n a = random.randint(1, maxHeight)\n b = random.randint(1, maxRadius)\n slopingHeight = math.sqrt(a ** 2 + b ** 2)\n problem = (\n f'Surface area of cone with height = {a}{unit} and radius = {b}{unit} is'\n )\n ans = int(math.pi * b * slopingHeight + math.pi * b * b)\n solution = f'{ans} {unit}^2'\n return problem, solution\n\n\nsurface_area_cone = Generator('Surface Area of cone', 38,\n 'Surface area of cone with height = a units and radius = b units is',\n 'c units^2', surfaceAreaCone)\n",
"step-4": "from .__init__ import *\n\n\ndef surfaceAreaCone(maxRadius=20, maxHeight=50, unit='m'):\n a = random.randint(1, maxHeight)\n b = random.randint(1, maxRadius)\n slopingHeight = math.sqrt(a ** 2 + b ** 2)\n problem = (\n f'Surface area of cone with height = {a}{unit} and radius = {b}{unit} is'\n )\n ans = int(math.pi * b * slopingHeight + math.pi * b * b)\n solution = f'{ans} {unit}^2'\n return problem, solution\n\n\nsurface_area_cone = Generator('Surface Area of cone', 38,\n 'Surface area of cone with height = a units and radius = b units is',\n 'c units^2', surfaceAreaCone)\n",
"step-5": "from .__init__ import *\n\n\ndef surfaceAreaCone(maxRadius=20, maxHeight=50, unit='m'):\n a = random.randint(1, maxHeight)\n b = random.randint(1, maxRadius)\n\n slopingHeight = math.sqrt(a**2 + b**2)\n problem = f\"Surface area of cone with height = {a}{unit} and radius = {b}{unit} is\"\n ans = int(math.pi * b * slopingHeight + math.pi * b * b)\n\n solution = f\"{ans} {unit}^2\"\n return problem, solution\n\n\nsurface_area_cone = Generator(\n \"Surface Area of cone\", 38,\n \"Surface area of cone with height = a units and radius = b units is\",\n \"c units^2\", surfaceAreaCone)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class UploadFileForm(forms.ModelForm):
class Meta:
model = Submit
fields = ['email', 'student_no', 'file']
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class UploadFileForm(forms.ModelForm):
class Meta:
model = Submit
fields = ['email', 'student_no', 'file']
@csrf_exempt
def upload(request):
frm = UploadFileForm(request.POST, request.FILES)
if not frm.is_valid():
return JsonResponse({'error': frm.errors})
submit = frm.save(commit=False)
submit.assignment, _ = Assignment.objects.get_or_create(name='HW3')
submit.time = time.time()
submit.save()
res = JsonResponse({'success': True})
if 'application/json' not in request.META['HTTP_ACCEPT']:
res['Content-Type'] = 'text/plain'
return res
<|reserved_special_token_1|>
from django import forms
from django.http import JsonResponse
from django.views.decorators.csrf import csrf_exempt
import time
from page.models import Submit, Assignment
class UploadFileForm(forms.ModelForm):
class Meta:
model = Submit
fields = ['email', 'student_no', 'file']
@csrf_exempt
def upload(request):
frm = UploadFileForm(request.POST, request.FILES)
if not frm.is_valid():
return JsonResponse({'error': frm.errors})
submit = frm.save(commit=False)
submit.assignment, _ = Assignment.objects.get_or_create(name='HW3')
submit.time = time.time()
submit.save()
res = JsonResponse({'success': True})
if 'application/json' not in request.META['HTTP_ACCEPT']:
res['Content-Type'] = 'text/plain'
return res
<|reserved_special_token_1|>
from django import forms
from django.http import JsonResponse
from django.views.decorators.csrf import csrf_exempt
import time
from page.models import Submit, Assignment
class UploadFileForm(forms.ModelForm):
class Meta:
model = Submit
fields = ['email', 'student_no', 'file']
@csrf_exempt
def upload(request):
    # TODO: check file size and type (a hedged sketch follows this view)
frm = UploadFileForm(request.POST, request.FILES)
if not frm.is_valid():
return JsonResponse({'error': frm.errors})
submit = frm.save(commit=False)
submit.assignment, _ = Assignment.objects.get_or_create(name='HW3')
submit.time = time.time()
submit.save()
res = JsonResponse({'success': True})
if 'application/json' not in request.META['HTTP_ACCEPT']:
# INTERNET EXPLORER!!
res['Content-Type'] = 'text/plain'
return res
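One hedged way to address the TODO above (my addition, not the project's actual validation): a clean_file hook on the form; the 5 MB cap and the accepted content type are illustrative assumptions.

from django import forms
from page.models import Submit

class UploadFileForm(forms.ModelForm):
    class Meta:
        model = Submit
        fields = ['email', 'student_no', 'file']

    def clean_file(self):
        f = self.cleaned_data['file']
        if f.size > 5 * 1024 * 1024:                 # assumed 5 MB limit
            raise forms.ValidationError('File too large.')
        if f.content_type != 'application/pdf':      # assumed expected type
            raise forms.ValidationError('Unexpected file type.')
        return f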
|
flexible
|
{
"blob_id": "dabc38db6a5c4d97e18be2edc9d4c6203e264741",
"index": 3849,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass UploadFileForm(forms.ModelForm):\n\n\n class Meta:\n model = Submit\n fields = ['email', 'student_no', 'file']\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass UploadFileForm(forms.ModelForm):\n\n\n class Meta:\n model = Submit\n fields = ['email', 'student_no', 'file']\n\n\n@csrf_exempt\ndef upload(request):\n frm = UploadFileForm(request.POST, request.FILES)\n if not frm.is_valid():\n return JsonResponse({'error': frm.errors})\n submit = frm.save(commit=False)\n submit.assignment, _ = Assignment.objects.get_or_create(name='HW3')\n submit.time = time.time()\n submit.save()\n res = JsonResponse({'success': True})\n if 'application/json' not in request.META['HTTP_ACCEPT']:\n res['Content-Type'] = 'text/plain'\n return res\n",
"step-4": "from django import forms\nfrom django.http import JsonResponse\nfrom django.views.decorators.csrf import csrf_exempt\nimport time\nfrom page.models import Submit, Assignment\n\n\nclass UploadFileForm(forms.ModelForm):\n\n\n class Meta:\n model = Submit\n fields = ['email', 'student_no', 'file']\n\n\n@csrf_exempt\ndef upload(request):\n frm = UploadFileForm(request.POST, request.FILES)\n if not frm.is_valid():\n return JsonResponse({'error': frm.errors})\n submit = frm.save(commit=False)\n submit.assignment, _ = Assignment.objects.get_or_create(name='HW3')\n submit.time = time.time()\n submit.save()\n res = JsonResponse({'success': True})\n if 'application/json' not in request.META['HTTP_ACCEPT']:\n res['Content-Type'] = 'text/plain'\n return res\n",
"step-5": "from django import forms\nfrom django.http import JsonResponse\nfrom django.views.decorators.csrf import csrf_exempt\nimport time\nfrom page.models import Submit, Assignment\n\n\nclass UploadFileForm(forms.ModelForm):\n class Meta:\n model = Submit\n fields = ['email', 'student_no', 'file']\n\n\n@csrf_exempt\ndef upload(request):\n # TODO: check file size and type\n frm = UploadFileForm(request.POST, request.FILES)\n if not frm.is_valid():\n return JsonResponse({'error': frm.errors})\n\n submit = frm.save(commit=False)\n submit.assignment, _ = Assignment.objects.get_or_create(name='HW3')\n submit.time = time.time()\n submit.save()\n\n res = JsonResponse({'success': True})\n if 'application/json' not in request.META['HTTP_ACCEPT']:\n # INTERNET EXPLORER!!\n res['Content-Type'] = 'text/plain'\n return res\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import unittest
from traceback import print_tb
from ml_base.utilities.model_manager import ModelManager
from tests.mocks import MLModelMock
class ModelManagerTests(unittest.TestCase):
def test_model_manager_will_return_same_instance_when_instantiated_many_times(self):
"""Testing that the ModelManager will return the same instance of an MLModel class from several different
references of ModelManager."""
# arrange, act
# instantiating the model manager class twice
first_model_manager = ModelManager()
second_model_manager = ModelManager()
# loading the MLModel objects from configuration
first_model_manager.load_model("tests.mocks.MLModelMock")
first_model_object = first_model_manager.get_model(qualified_name="qualified_name")
second_model_object = second_model_manager.get_model(qualified_name="qualified_name")
# assert
self.assertTrue(str(first_model_manager) == str(second_model_manager))
self.assertTrue(str(first_model_object) == str(second_model_object))
def test_load_model_method(self):
"""Testing the load_model() method."""
# arrange
# instantiating the model manager class
model_manager = ModelManager()
# adding the model
model_manager.load_model("tests.mocks.MLModelMock")
# act
exception_raised = False
model_object = None
# accessing the MLModelMock model object
try:
model_object = model_manager.get_model(qualified_name="qualified_name")
except Exception as e:
exception_raised = True
            print_tb(e.__traceback__)  # print_tb expects a traceback object, not the exception
# assert
self.assertFalse(exception_raised)
self.assertTrue(model_object is not None)
def test_load_model_method_with_wrong_class_path(self):
"""Testing the load_model() method."""
# arrange
# instantiating the model manager class
model_manager = ModelManager()
# act
# adding the model
exception_raised = False
exception_message = None
# accessing the MLModelMock model object
try:
model_manager.load_model("sdf.sdf.sdf")
except Exception as e:
exception_raised = True
exception_message = str(e)
# assert
self.assertTrue(exception_raised)
self.assertTrue(exception_message == "No module named 'sdf'")
def test_only_ml_model_instances_allowed_to_be_stored(self):
"""Testing that the ModelManager only allows MLModel objects to be stored."""
# arrange
model_manager = ModelManager()
# act
exception_raised = False
exception_message = ""
try:
model_manager.load_model("tests.mocks.SomeClass")
except Exception as e:
exception_raised = True
exception_message = str(e)
# assert
self.assertTrue(exception_raised)
self.assertTrue(exception_message == "ModelManager instance can only hold references to objects of type MLModel.")
def test_model_manager_does_not_allow_duplicate_qualified_names(self):
"""Testing that the ModelManager does not allow duplicate qualified names in the singleton."""
# arrange
model_manager = ModelManager()
# act
# loading the first instance of the model object
model_manager.load_model("tests.mocks.MLModelMock")
exception_raised = False
exception_message = ""
try:
# loading it again
model_manager.load_model("tests.mocks.MLModelMock")
except Exception as e:
exception_raised = True
exception_message = str(e)
# assert
self.assertTrue(exception_raised)
self.assertTrue(exception_message == "A model with the same qualified name is already in the ModelManager singleton.")
def test_remove_model_method(self):
"""Testing the remove_model() method."""
# arrange
# instantiating the model manager class
model_manager = ModelManager()
# adding the model
model_manager.load_model("tests.mocks.MLModelMock")
# act
exception_raised1 = False
# accessing the MLModelMock model object
try:
model_manager.remove_model(qualified_name="qualified_name")
except Exception as e:
exception_raised1 = True
exception_raised2 = False
exception_message2 = ""
# trying to access the model that was removed
try:
model = model_manager.get_model(qualified_name="qualified_name")
except Exception as e:
exception_raised2 = True
exception_message2 = str(e)
# assert
self.assertFalse(exception_raised1)
self.assertTrue(exception_raised2)
self.assertTrue(exception_message2 == "Instance of model 'qualified_name' not found in ModelManager.")
def test_remove_model_method_with_missing_model(self):
"""Testing that the ModelManager raises ValueError exception when removing a model that is not found."""
# arrange
model_manager = ModelManager()
model_manager.load_model("tests.mocks.MLModelMock")
# act
exception_raised = False
exception_message = ""
try:
model_manager.remove_model(qualified_name="asdf")
except Exception as e:
exception_raised = True
exception_message = str(e)
# assert
self.assertTrue(exception_raised)
self.assertTrue(exception_message == "Instance of model 'asdf' not found in ModelManager.")
def test_get_models_method(self):
"""Testing get_models method."""
# arrange
model_manager = ModelManager()
model_manager.load_model("tests.mocks.MLModelMock")
# act
models = model_manager.get_models()
# assert
self.assertTrue(models[0]["display_name"] == "display_name")
self.assertTrue(models[0]["qualified_name"] == "qualified_name")
self.assertTrue(models[0]["description"] == "description")
self.assertTrue(models[0]["version"] == "1.0.0")
def test_get_model_metadata_method(self):
"""Testing get_model_metadata method."""
# arrange
model_manager = ModelManager()
model_manager.load_model("tests.mocks.MLModelMock")
# act
model_metadata = model_manager.get_model_metadata(qualified_name="qualified_name")
# assert
self.assertTrue(model_metadata["display_name"] == "display_name")
self.assertTrue(model_metadata["qualified_name"] == "qualified_name")
self.assertTrue(model_metadata["description"] == "description")
self.assertTrue(model_metadata["version"] == "1.0.0")
self.assertTrue(type(model_metadata["input_schema"]) is dict)
self.assertTrue(type(model_metadata["output_schema"]) is dict)
def test_get_model_metadata_method_with_missing_model(self):
"""Testing get_model_metadata method with missing model."""
# arrange
model_manager = ModelManager()
model_manager.load_model("tests.mocks.MLModelMock")
# act
        exception_raised = False
exception_message = None
try:
model_metadata = model_manager.get_model_metadata(qualified_name="asdf")
except Exception as e:
            exception_raised = True
exception_message = str(e)
# assert
        self.assertTrue(exception_raised)
self.assertTrue(exception_message == "Instance of model 'asdf' not found in ModelManager.")
def test_get_model_method(self):
"""Testing the get_model method."""
# arrange
model_manager = ModelManager()
model_manager.load_model("tests.mocks.MLModelMock")
# act
exception_raised = False
model = None
try:
model = model_manager.get_model(qualified_name="qualified_name")
except Exception as e:
exception_raised = True
# assert
self.assertFalse(exception_raised)
self.assertTrue(type(model) is MLModelMock)
def test_get_model_method_with_missing_model(self):
"""Testing that the ModelManager raises ValueError exception when a model is not found."""
# arrange
model_manager = ModelManager()
model_manager.load_model("tests.mocks.MLModelMock")
# act
exception_raised = False
exception_message = ""
model = None
try:
model = model_manager.get_model(qualified_name="asdf")
except Exception as e:
exception_raised = True
exception_message = str(e)
# assert
self.assertTrue(exception_raised)
self.assertTrue(exception_message == "Instance of model 'asdf' not found in ModelManager.")
if __name__ == '__main__':
unittest.main()
|
normal
|
{
"blob_id": "8355faf7c0d3742be34a56ddc982cb389c80d0a9",
"index": 1063,
"step-1": "<mask token>\n\n\nclass ModelManagerTests(unittest.TestCase):\n\n def test_model_manager_will_return_same_instance_when_instantiated_many_times(\n self):\n \"\"\"Testing that the ModelManager will return the same instance of an MLModel class from several different\n references of ModelManager.\"\"\"\n first_model_manager = ModelManager()\n second_model_manager = ModelManager()\n first_model_manager.load_model('tests.mocks.MLModelMock')\n first_model_object = first_model_manager.get_model(qualified_name=\n 'qualified_name')\n second_model_object = second_model_manager.get_model(qualified_name\n ='qualified_name')\n self.assertTrue(str(first_model_manager) == str(second_model_manager))\n self.assertTrue(str(first_model_object) == str(second_model_object))\n\n def test_load_model_method(self):\n \"\"\"Testing the load_model() method.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n exception_raised = False\n model_object = None\n try:\n model_object = model_manager.get_model(qualified_name=\n 'qualified_name')\n except Exception as e:\n exception_raised = True\n print_tb(e)\n self.assertFalse(exception_raised)\n self.assertTrue(model_object is not None)\n <mask token>\n\n def test_only_ml_model_instances_allowed_to_be_stored(self):\n \"\"\"Testing that the ModelManager only allows MLModel objects to be stored.\"\"\"\n model_manager = ModelManager()\n exception_raised = False\n exception_message = ''\n try:\n model_manager.load_model('tests.mocks.SomeClass')\n except Exception as e:\n exception_raised = True\n exception_message = str(e)\n self.assertTrue(exception_raised)\n self.assertTrue(exception_message ==\n 'ModelManager instance can only hold references to objects of type MLModel.'\n )\n\n def test_model_manager_does_not_allow_duplicate_qualified_names(self):\n \"\"\"Testing that the ModelManager does not allow duplicate qualified names in the singleton.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n exception_raised = False\n exception_message = ''\n try:\n model_manager.load_model('tests.mocks.MLModelMock')\n except Exception as e:\n exception_raised = True\n exception_message = str(e)\n self.assertTrue(exception_raised)\n self.assertTrue(exception_message ==\n 'A model with the same qualified name is already in the ModelManager singleton.'\n )\n\n def test_remove_model_method(self):\n \"\"\"Testing the remove_model() method.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n exception_raised1 = False\n try:\n model_manager.remove_model(qualified_name='qualified_name')\n except Exception as e:\n exception_raised1 = True\n exception_raised2 = False\n exception_message2 = ''\n try:\n model = model_manager.get_model(qualified_name='qualified_name')\n except Exception as e:\n exception_raised2 = True\n exception_message2 = str(e)\n self.assertFalse(exception_raised1)\n self.assertTrue(exception_raised2)\n self.assertTrue(exception_message2 ==\n \"Instance of model 'qualified_name' not found in ModelManager.\")\n <mask token>\n <mask token>\n\n def test_get_model_metadata_method(self):\n \"\"\"Testing get_model_metadata method.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n model_metadata = model_manager.get_model_metadata(qualified_name=\n 'qualified_name')\n self.assertTrue(model_metadata['display_name'] == 'display_name')\n self.assertTrue(model_metadata['qualified_name'] == 'qualified_name')\n 
self.assertTrue(model_metadata['description'] == 'description')\n self.assertTrue(model_metadata['version'] == '1.0.0')\n self.assertTrue(type(model_metadata['input_schema']) is dict)\n self.assertTrue(type(model_metadata['output_schema']) is dict)\n\n def test_get_model_metadata_method_with_missing_model(self):\n \"\"\"Testing get_model_metadata method with missing model.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n excpeption_raised = False\n exception_message = None\n try:\n model_metadata = model_manager.get_model_metadata(qualified_name\n ='asdf')\n except Exception as e:\n excpeption_raised = True\n exception_message = str(e)\n self.assertTrue(excpeption_raised)\n self.assertTrue(exception_message ==\n \"Instance of model 'asdf' not found in ModelManager.\")\n <mask token>\n\n def test_get_model_method_with_missing_model(self):\n \"\"\"Testing that the ModelManager raises ValueError exception when a model is not found.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n exception_raised = False\n exception_message = ''\n model = None\n try:\n model = model_manager.get_model(qualified_name='asdf')\n except Exception as e:\n exception_raised = True\n exception_message = str(e)\n self.assertTrue(exception_raised)\n self.assertTrue(exception_message ==\n \"Instance of model 'asdf' not found in ModelManager.\")\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass ModelManagerTests(unittest.TestCase):\n\n def test_model_manager_will_return_same_instance_when_instantiated_many_times(\n self):\n \"\"\"Testing that the ModelManager will return the same instance of an MLModel class from several different\n references of ModelManager.\"\"\"\n first_model_manager = ModelManager()\n second_model_manager = ModelManager()\n first_model_manager.load_model('tests.mocks.MLModelMock')\n first_model_object = first_model_manager.get_model(qualified_name=\n 'qualified_name')\n second_model_object = second_model_manager.get_model(qualified_name\n ='qualified_name')\n self.assertTrue(str(first_model_manager) == str(second_model_manager))\n self.assertTrue(str(first_model_object) == str(second_model_object))\n\n def test_load_model_method(self):\n \"\"\"Testing the load_model() method.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n exception_raised = False\n model_object = None\n try:\n model_object = model_manager.get_model(qualified_name=\n 'qualified_name')\n except Exception as e:\n exception_raised = True\n print_tb(e)\n self.assertFalse(exception_raised)\n self.assertTrue(model_object is not None)\n\n def test_load_model_method_with_wrong_class_path(self):\n \"\"\"Testing the load_model() method.\"\"\"\n model_manager = ModelManager()\n exception_raised = False\n exception_message = None\n try:\n model_manager.load_model('sdf.sdf.sdf')\n except Exception as e:\n exception_raised = True\n exception_message = str(e)\n self.assertTrue(exception_raised)\n self.assertTrue(exception_message == \"No module named 'sdf'\")\n\n def test_only_ml_model_instances_allowed_to_be_stored(self):\n \"\"\"Testing that the ModelManager only allows MLModel objects to be stored.\"\"\"\n model_manager = ModelManager()\n exception_raised = False\n exception_message = ''\n try:\n model_manager.load_model('tests.mocks.SomeClass')\n except Exception as e:\n exception_raised = True\n exception_message = str(e)\n self.assertTrue(exception_raised)\n self.assertTrue(exception_message ==\n 'ModelManager instance can only hold references to objects of type MLModel.'\n )\n\n def test_model_manager_does_not_allow_duplicate_qualified_names(self):\n \"\"\"Testing that the ModelManager does not allow duplicate qualified names in the singleton.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n exception_raised = False\n exception_message = ''\n try:\n model_manager.load_model('tests.mocks.MLModelMock')\n except Exception as e:\n exception_raised = True\n exception_message = str(e)\n self.assertTrue(exception_raised)\n self.assertTrue(exception_message ==\n 'A model with the same qualified name is already in the ModelManager singleton.'\n )\n\n def test_remove_model_method(self):\n \"\"\"Testing the remove_model() method.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n exception_raised1 = False\n try:\n model_manager.remove_model(qualified_name='qualified_name')\n except Exception as e:\n exception_raised1 = True\n exception_raised2 = False\n exception_message2 = ''\n try:\n model = model_manager.get_model(qualified_name='qualified_name')\n except Exception as e:\n exception_raised2 = True\n exception_message2 = str(e)\n self.assertFalse(exception_raised1)\n self.assertTrue(exception_raised2)\n self.assertTrue(exception_message2 ==\n \"Instance of model 'qualified_name' not found in ModelManager.\")\n\n def 
test_remove_model_method_with_missing_model(self):\n \"\"\"Testing that the ModelManager raises ValueError exception when removing a model that is not found.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n exception_raised = False\n exception_message = ''\n try:\n model_manager.remove_model(qualified_name='asdf')\n except Exception as e:\n exception_raised = True\n exception_message = str(e)\n self.assertTrue(exception_raised)\n self.assertTrue(exception_message ==\n \"Instance of model 'asdf' not found in ModelManager.\")\n\n def test_get_models_method(self):\n \"\"\"Testing get_models method.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n models = model_manager.get_models()\n self.assertTrue(models[0]['display_name'] == 'display_name')\n self.assertTrue(models[0]['qualified_name'] == 'qualified_name')\n self.assertTrue(models[0]['description'] == 'description')\n self.assertTrue(models[0]['version'] == '1.0.0')\n\n def test_get_model_metadata_method(self):\n \"\"\"Testing get_model_metadata method.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n model_metadata = model_manager.get_model_metadata(qualified_name=\n 'qualified_name')\n self.assertTrue(model_metadata['display_name'] == 'display_name')\n self.assertTrue(model_metadata['qualified_name'] == 'qualified_name')\n self.assertTrue(model_metadata['description'] == 'description')\n self.assertTrue(model_metadata['version'] == '1.0.0')\n self.assertTrue(type(model_metadata['input_schema']) is dict)\n self.assertTrue(type(model_metadata['output_schema']) is dict)\n\n def test_get_model_metadata_method_with_missing_model(self):\n \"\"\"Testing get_model_metadata method with missing model.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n excpeption_raised = False\n exception_message = None\n try:\n model_metadata = model_manager.get_model_metadata(qualified_name\n ='asdf')\n except Exception as e:\n excpeption_raised = True\n exception_message = str(e)\n self.assertTrue(excpeption_raised)\n self.assertTrue(exception_message ==\n \"Instance of model 'asdf' not found in ModelManager.\")\n\n def test_get_model_method(self):\n \"\"\"Testing the get_model method.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n exception_raised = False\n model = None\n try:\n model = model_manager.get_model(qualified_name='qualified_name')\n except Exception as e:\n exception_raised = True\n self.assertFalse(exception_raised)\n self.assertTrue(type(model) is MLModelMock)\n\n def test_get_model_method_with_missing_model(self):\n \"\"\"Testing that the ModelManager raises ValueError exception when a model is not found.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n exception_raised = False\n exception_message = ''\n model = None\n try:\n model = model_manager.get_model(qualified_name='asdf')\n except Exception as e:\n exception_raised = True\n exception_message = str(e)\n self.assertTrue(exception_raised)\n self.assertTrue(exception_message ==\n \"Instance of model 'asdf' not found in ModelManager.\")\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass ModelManagerTests(unittest.TestCase):\n\n def test_model_manager_will_return_same_instance_when_instantiated_many_times(\n self):\n \"\"\"Testing that the ModelManager will return the same instance of an MLModel class from several different\n references of ModelManager.\"\"\"\n first_model_manager = ModelManager()\n second_model_manager = ModelManager()\n first_model_manager.load_model('tests.mocks.MLModelMock')\n first_model_object = first_model_manager.get_model(qualified_name=\n 'qualified_name')\n second_model_object = second_model_manager.get_model(qualified_name\n ='qualified_name')\n self.assertTrue(str(first_model_manager) == str(second_model_manager))\n self.assertTrue(str(first_model_object) == str(second_model_object))\n\n def test_load_model_method(self):\n \"\"\"Testing the load_model() method.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n exception_raised = False\n model_object = None\n try:\n model_object = model_manager.get_model(qualified_name=\n 'qualified_name')\n except Exception as e:\n exception_raised = True\n print_tb(e)\n self.assertFalse(exception_raised)\n self.assertTrue(model_object is not None)\n\n def test_load_model_method_with_wrong_class_path(self):\n \"\"\"Testing the load_model() method.\"\"\"\n model_manager = ModelManager()\n exception_raised = False\n exception_message = None\n try:\n model_manager.load_model('sdf.sdf.sdf')\n except Exception as e:\n exception_raised = True\n exception_message = str(e)\n self.assertTrue(exception_raised)\n self.assertTrue(exception_message == \"No module named 'sdf'\")\n\n def test_only_ml_model_instances_allowed_to_be_stored(self):\n \"\"\"Testing that the ModelManager only allows MLModel objects to be stored.\"\"\"\n model_manager = ModelManager()\n exception_raised = False\n exception_message = ''\n try:\n model_manager.load_model('tests.mocks.SomeClass')\n except Exception as e:\n exception_raised = True\n exception_message = str(e)\n self.assertTrue(exception_raised)\n self.assertTrue(exception_message ==\n 'ModelManager instance can only hold references to objects of type MLModel.'\n )\n\n def test_model_manager_does_not_allow_duplicate_qualified_names(self):\n \"\"\"Testing that the ModelManager does not allow duplicate qualified names in the singleton.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n exception_raised = False\n exception_message = ''\n try:\n model_manager.load_model('tests.mocks.MLModelMock')\n except Exception as e:\n exception_raised = True\n exception_message = str(e)\n self.assertTrue(exception_raised)\n self.assertTrue(exception_message ==\n 'A model with the same qualified name is already in the ModelManager singleton.'\n )\n\n def test_remove_model_method(self):\n \"\"\"Testing the remove_model() method.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n exception_raised1 = False\n try:\n model_manager.remove_model(qualified_name='qualified_name')\n except Exception as e:\n exception_raised1 = True\n exception_raised2 = False\n exception_message2 = ''\n try:\n model = model_manager.get_model(qualified_name='qualified_name')\n except Exception as e:\n exception_raised2 = True\n exception_message2 = str(e)\n self.assertFalse(exception_raised1)\n self.assertTrue(exception_raised2)\n self.assertTrue(exception_message2 ==\n \"Instance of model 'qualified_name' not found in ModelManager.\")\n\n def 
test_remove_model_method_with_missing_model(self):\n \"\"\"Testing that the ModelManager raises ValueError exception when removing a model that is not found.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n exception_raised = False\n exception_message = ''\n try:\n model_manager.remove_model(qualified_name='asdf')\n except Exception as e:\n exception_raised = True\n exception_message = str(e)\n self.assertTrue(exception_raised)\n self.assertTrue(exception_message ==\n \"Instance of model 'asdf' not found in ModelManager.\")\n\n def test_get_models_method(self):\n \"\"\"Testing get_models method.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n models = model_manager.get_models()\n self.assertTrue(models[0]['display_name'] == 'display_name')\n self.assertTrue(models[0]['qualified_name'] == 'qualified_name')\n self.assertTrue(models[0]['description'] == 'description')\n self.assertTrue(models[0]['version'] == '1.0.0')\n\n def test_get_model_metadata_method(self):\n \"\"\"Testing get_model_metadata method.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n model_metadata = model_manager.get_model_metadata(qualified_name=\n 'qualified_name')\n self.assertTrue(model_metadata['display_name'] == 'display_name')\n self.assertTrue(model_metadata['qualified_name'] == 'qualified_name')\n self.assertTrue(model_metadata['description'] == 'description')\n self.assertTrue(model_metadata['version'] == '1.0.0')\n self.assertTrue(type(model_metadata['input_schema']) is dict)\n self.assertTrue(type(model_metadata['output_schema']) is dict)\n\n def test_get_model_metadata_method_with_missing_model(self):\n \"\"\"Testing get_model_metadata method with missing model.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n excpeption_raised = False\n exception_message = None\n try:\n model_metadata = model_manager.get_model_metadata(qualified_name\n ='asdf')\n except Exception as e:\n excpeption_raised = True\n exception_message = str(e)\n self.assertTrue(excpeption_raised)\n self.assertTrue(exception_message ==\n \"Instance of model 'asdf' not found in ModelManager.\")\n\n def test_get_model_method(self):\n \"\"\"Testing the get_model method.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n exception_raised = False\n model = None\n try:\n model = model_manager.get_model(qualified_name='qualified_name')\n except Exception as e:\n exception_raised = True\n self.assertFalse(exception_raised)\n self.assertTrue(type(model) is MLModelMock)\n\n def test_get_model_method_with_missing_model(self):\n \"\"\"Testing that the ModelManager raises ValueError exception when a model is not found.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n exception_raised = False\n exception_message = ''\n model = None\n try:\n model = model_manager.get_model(qualified_name='asdf')\n except Exception as e:\n exception_raised = True\n exception_message = str(e)\n self.assertTrue(exception_raised)\n self.assertTrue(exception_message ==\n \"Instance of model 'asdf' not found in ModelManager.\")\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-4": "import unittest\nfrom traceback import print_tb\nfrom ml_base.utilities.model_manager import ModelManager\nfrom tests.mocks import MLModelMock\n\n\nclass ModelManagerTests(unittest.TestCase):\n\n def test_model_manager_will_return_same_instance_when_instantiated_many_times(\n self):\n \"\"\"Testing that the ModelManager will return the same instance of an MLModel class from several different\n references of ModelManager.\"\"\"\n first_model_manager = ModelManager()\n second_model_manager = ModelManager()\n first_model_manager.load_model('tests.mocks.MLModelMock')\n first_model_object = first_model_manager.get_model(qualified_name=\n 'qualified_name')\n second_model_object = second_model_manager.get_model(qualified_name\n ='qualified_name')\n self.assertTrue(str(first_model_manager) == str(second_model_manager))\n self.assertTrue(str(first_model_object) == str(second_model_object))\n\n def test_load_model_method(self):\n \"\"\"Testing the load_model() method.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n exception_raised = False\n model_object = None\n try:\n model_object = model_manager.get_model(qualified_name=\n 'qualified_name')\n except Exception as e:\n exception_raised = True\n print_tb(e)\n self.assertFalse(exception_raised)\n self.assertTrue(model_object is not None)\n\n def test_load_model_method_with_wrong_class_path(self):\n \"\"\"Testing the load_model() method.\"\"\"\n model_manager = ModelManager()\n exception_raised = False\n exception_message = None\n try:\n model_manager.load_model('sdf.sdf.sdf')\n except Exception as e:\n exception_raised = True\n exception_message = str(e)\n self.assertTrue(exception_raised)\n self.assertTrue(exception_message == \"No module named 'sdf'\")\n\n def test_only_ml_model_instances_allowed_to_be_stored(self):\n \"\"\"Testing that the ModelManager only allows MLModel objects to be stored.\"\"\"\n model_manager = ModelManager()\n exception_raised = False\n exception_message = ''\n try:\n model_manager.load_model('tests.mocks.SomeClass')\n except Exception as e:\n exception_raised = True\n exception_message = str(e)\n self.assertTrue(exception_raised)\n self.assertTrue(exception_message ==\n 'ModelManager instance can only hold references to objects of type MLModel.'\n )\n\n def test_model_manager_does_not_allow_duplicate_qualified_names(self):\n \"\"\"Testing that the ModelManager does not allow duplicate qualified names in the singleton.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n exception_raised = False\n exception_message = ''\n try:\n model_manager.load_model('tests.mocks.MLModelMock')\n except Exception as e:\n exception_raised = True\n exception_message = str(e)\n self.assertTrue(exception_raised)\n self.assertTrue(exception_message ==\n 'A model with the same qualified name is already in the ModelManager singleton.'\n )\n\n def test_remove_model_method(self):\n \"\"\"Testing the remove_model() method.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n exception_raised1 = False\n try:\n model_manager.remove_model(qualified_name='qualified_name')\n except Exception as e:\n exception_raised1 = True\n exception_raised2 = False\n exception_message2 = ''\n try:\n model = model_manager.get_model(qualified_name='qualified_name')\n except Exception as e:\n exception_raised2 = True\n exception_message2 = str(e)\n self.assertFalse(exception_raised1)\n self.assertTrue(exception_raised2)\n 
self.assertTrue(exception_message2 ==\n \"Instance of model 'qualified_name' not found in ModelManager.\")\n\n def test_remove_model_method_with_missing_model(self):\n \"\"\"Testing that the ModelManager raises ValueError exception when removing a model that is not found.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n exception_raised = False\n exception_message = ''\n try:\n model_manager.remove_model(qualified_name='asdf')\n except Exception as e:\n exception_raised = True\n exception_message = str(e)\n self.assertTrue(exception_raised)\n self.assertTrue(exception_message ==\n \"Instance of model 'asdf' not found in ModelManager.\")\n\n def test_get_models_method(self):\n \"\"\"Testing get_models method.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n models = model_manager.get_models()\n self.assertTrue(models[0]['display_name'] == 'display_name')\n self.assertTrue(models[0]['qualified_name'] == 'qualified_name')\n self.assertTrue(models[0]['description'] == 'description')\n self.assertTrue(models[0]['version'] == '1.0.0')\n\n def test_get_model_metadata_method(self):\n \"\"\"Testing get_model_metadata method.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n model_metadata = model_manager.get_model_metadata(qualified_name=\n 'qualified_name')\n self.assertTrue(model_metadata['display_name'] == 'display_name')\n self.assertTrue(model_metadata['qualified_name'] == 'qualified_name')\n self.assertTrue(model_metadata['description'] == 'description')\n self.assertTrue(model_metadata['version'] == '1.0.0')\n self.assertTrue(type(model_metadata['input_schema']) is dict)\n self.assertTrue(type(model_metadata['output_schema']) is dict)\n\n def test_get_model_metadata_method_with_missing_model(self):\n \"\"\"Testing get_model_metadata method with missing model.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n excpeption_raised = False\n exception_message = None\n try:\n model_metadata = model_manager.get_model_metadata(qualified_name\n ='asdf')\n except Exception as e:\n excpeption_raised = True\n exception_message = str(e)\n self.assertTrue(excpeption_raised)\n self.assertTrue(exception_message ==\n \"Instance of model 'asdf' not found in ModelManager.\")\n\n def test_get_model_method(self):\n \"\"\"Testing the get_model method.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n exception_raised = False\n model = None\n try:\n model = model_manager.get_model(qualified_name='qualified_name')\n except Exception as e:\n exception_raised = True\n self.assertFalse(exception_raised)\n self.assertTrue(type(model) is MLModelMock)\n\n def test_get_model_method_with_missing_model(self):\n \"\"\"Testing that the ModelManager raises ValueError exception when a model is not found.\"\"\"\n model_manager = ModelManager()\n model_manager.load_model('tests.mocks.MLModelMock')\n exception_raised = False\n exception_message = ''\n model = None\n try:\n model = model_manager.get_model(qualified_name='asdf')\n except Exception as e:\n exception_raised = True\n exception_message = str(e)\n self.assertTrue(exception_raised)\n self.assertTrue(exception_message ==\n \"Instance of model 'asdf' not found in ModelManager.\")\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-5": "import unittest\nfrom traceback import print_tb\n\nfrom ml_base.utilities.model_manager import ModelManager\nfrom tests.mocks import MLModelMock\n\n\nclass ModelManagerTests(unittest.TestCase):\n\n def test_model_manager_will_return_same_instance_when_instantiated_many_times(self):\n \"\"\"Testing that the ModelManager will return the same instance of an MLModel class from several different\n references of ModelManager.\"\"\"\n # arrange, act\n # instantiating the model manager class twice\n first_model_manager = ModelManager()\n second_model_manager = ModelManager()\n\n # loading the MLModel objects from configuration\n first_model_manager.load_model(\"tests.mocks.MLModelMock\")\n\n first_model_object = first_model_manager.get_model(qualified_name=\"qualified_name\")\n second_model_object = second_model_manager.get_model(qualified_name=\"qualified_name\")\n\n # assert\n self.assertTrue(str(first_model_manager) == str(second_model_manager))\n self.assertTrue(str(first_model_object) == str(second_model_object))\n\n def test_load_model_method(self):\n \"\"\"Testing the load_model() method.\"\"\"\n # arrange\n # instantiating the model manager class\n model_manager = ModelManager()\n\n # adding the model\n model_manager.load_model(\"tests.mocks.MLModelMock\")\n\n # act\n exception_raised = False\n model_object = None\n # accessing the MLModelMock model object\n try:\n model_object = model_manager.get_model(qualified_name=\"qualified_name\")\n except Exception as e:\n exception_raised = True\n print_tb(e)\n\n # assert\n self.assertFalse(exception_raised)\n self.assertTrue(model_object is not None)\n\n def test_load_model_method_with_wrong_class_path(self):\n \"\"\"Testing the load_model() method.\"\"\"\n # arrange\n # instantiating the model manager class\n model_manager = ModelManager()\n\n # act\n # adding the model\n exception_raised = False\n exception_message = None\n # accessing the MLModelMock model object\n try:\n model_manager.load_model(\"sdf.sdf.sdf\")\n except Exception as e:\n exception_raised = True\n exception_message = str(e)\n\n # assert\n self.assertTrue(exception_raised)\n self.assertTrue(exception_message == \"No module named 'sdf'\")\n\n def test_only_ml_model_instances_allowed_to_be_stored(self):\n \"\"\"Testing that the ModelManager only allows MLModel objects to be stored.\"\"\"\n # arrange\n model_manager = ModelManager()\n\n # act\n exception_raised = False\n exception_message = \"\"\n try:\n model_manager.load_model(\"tests.mocks.SomeClass\")\n except Exception as e:\n exception_raised = True\n exception_message = str(e)\n\n # assert\n self.assertTrue(exception_raised)\n self.assertTrue(exception_message == \"ModelManager instance can only hold references to objects of type MLModel.\")\n\n def test_model_manager_does_not_allow_duplicate_qualified_names(self):\n \"\"\"Testing that the ModelManager does not allow duplicate qualified names in the singleton.\"\"\"\n # arrange\n model_manager = ModelManager()\n\n # act\n # loading the first instance of the model object\n model_manager.load_model(\"tests.mocks.MLModelMock\")\n\n exception_raised = False\n exception_message = \"\"\n try:\n # loading it again\n model_manager.load_model(\"tests.mocks.MLModelMock\")\n except Exception as e:\n exception_raised = True\n exception_message = str(e)\n\n # assert\n self.assertTrue(exception_raised)\n self.assertTrue(exception_message == \"A model with the same qualified name is already in the ModelManager singleton.\")\n\n def test_remove_model_method(self):\n 
\"\"\"Testing the remove_model() method.\"\"\"\n # arrange\n # instantiating the model manager class\n model_manager = ModelManager()\n\n # adding the model\n model_manager.load_model(\"tests.mocks.MLModelMock\")\n\n # act\n exception_raised1 = False\n # accessing the MLModelMock model object\n try:\n model_manager.remove_model(qualified_name=\"qualified_name\")\n except Exception as e:\n exception_raised1 = True\n\n exception_raised2 = False\n exception_message2 = \"\"\n # trying to access the model that was removed\n try:\n model = model_manager.get_model(qualified_name=\"qualified_name\")\n except Exception as e:\n exception_raised2 = True\n exception_message2 = str(e)\n\n # assert\n self.assertFalse(exception_raised1)\n self.assertTrue(exception_raised2)\n self.assertTrue(exception_message2 == \"Instance of model 'qualified_name' not found in ModelManager.\")\n\n def test_remove_model_method_with_missing_model(self):\n \"\"\"Testing that the ModelManager raises ValueError exception when removing a model that is not found.\"\"\"\n # arrange\n model_manager = ModelManager()\n\n model_manager.load_model(\"tests.mocks.MLModelMock\")\n\n # act\n exception_raised = False\n exception_message = \"\"\n try:\n model_manager.remove_model(qualified_name=\"asdf\")\n except Exception as e:\n exception_raised = True\n exception_message = str(e)\n\n # assert\n self.assertTrue(exception_raised)\n self.assertTrue(exception_message == \"Instance of model 'asdf' not found in ModelManager.\")\n\n def test_get_models_method(self):\n \"\"\"Testing get_models method.\"\"\"\n # arrange\n model_manager = ModelManager()\n\n model_manager.load_model(\"tests.mocks.MLModelMock\")\n\n # act\n models = model_manager.get_models()\n\n # assert\n self.assertTrue(models[0][\"display_name\"] == \"display_name\")\n self.assertTrue(models[0][\"qualified_name\"] == \"qualified_name\")\n self.assertTrue(models[0][\"description\"] == \"description\")\n self.assertTrue(models[0][\"version\"] == \"1.0.0\")\n\n def test_get_model_metadata_method(self):\n \"\"\"Testing get_model_metadata method.\"\"\"\n # arrange\n model_manager = ModelManager()\n\n model_manager.load_model(\"tests.mocks.MLModelMock\")\n\n # act\n model_metadata = model_manager.get_model_metadata(qualified_name=\"qualified_name\")\n\n # assert\n self.assertTrue(model_metadata[\"display_name\"] == \"display_name\")\n self.assertTrue(model_metadata[\"qualified_name\"] == \"qualified_name\")\n self.assertTrue(model_metadata[\"description\"] == \"description\")\n self.assertTrue(model_metadata[\"version\"] == \"1.0.0\")\n self.assertTrue(type(model_metadata[\"input_schema\"]) is dict)\n self.assertTrue(type(model_metadata[\"output_schema\"]) is dict)\n\n def test_get_model_metadata_method_with_missing_model(self):\n \"\"\"Testing get_model_metadata method with missing model.\"\"\"\n # arrange\n model_manager = ModelManager()\n\n model_manager.load_model(\"tests.mocks.MLModelMock\")\n\n # act\n excpeption_raised = False\n exception_message = None\n try:\n model_metadata = model_manager.get_model_metadata(qualified_name=\"asdf\")\n except Exception as e:\n excpeption_raised = True\n exception_message = str(e)\n\n # assert\n self.assertTrue(excpeption_raised)\n self.assertTrue(exception_message == \"Instance of model 'asdf' not found in ModelManager.\")\n\n def test_get_model_method(self):\n \"\"\"Testing the get_model method.\"\"\"\n # arrange\n model_manager = ModelManager()\n\n model_manager.load_model(\"tests.mocks.MLModelMock\")\n\n # act\n exception_raised = 
False\n model = None\n try:\n model = model_manager.get_model(qualified_name=\"qualified_name\")\n except Exception as e:\n exception_raised = True\n\n # assert\n self.assertFalse(exception_raised)\n self.assertTrue(type(model) is MLModelMock)\n\n def test_get_model_method_with_missing_model(self):\n \"\"\"Testing that the ModelManager raises ValueError exception when a model is not found.\"\"\"\n # arrange\n model_manager = ModelManager()\n\n model_manager.load_model(\"tests.mocks.MLModelMock\")\n\n # act\n exception_raised = False\n exception_message = \"\"\n model = None\n try:\n model = model_manager.get_model(qualified_name=\"asdf\")\n except Exception as e:\n exception_raised = True\n exception_message = str(e)\n\n # assert\n self.assertTrue(exception_raised)\n self.assertTrue(exception_message == \"Instance of model 'asdf' not found in ModelManager.\")\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-ids": [
9,
13,
14,
15,
16
]
}
|
[
9,
13,
14,
15,
16
] |
n=int(input("please enter the number : "))
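#print the multiplication table of n for multipliers 0 through 10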
for i in range(11):
print(n," X ",i," = ",n*i)
|
normal
|
{
"blob_id": "ea4a55ed17c5cc2c6f127112af636ca885159c86",
"index": 5768,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(11):\n print(n, ' X ', i, ' = ', n * i)\n",
"step-3": "n = int(input('please enter the number : '))\nfor i in range(11):\n print(n, ' X ', i, ' = ', n * i)\n",
"step-4": "n=int(input(\"please enter the number : \"))\nfor i in range(11):\n print(n,\" X \",i,\" = \",n*i)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import sys
import sucessor
import expande
from collections import deque
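# iterative-deepening depth-first search over 8-puzzle states; the goal state is "12345678_"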
def busca_caminho(nodo_final, nodo_inicial):
	pilha_acoes = deque() # we stack the actions, since they are recovered in reverse order
v = nodo_final
while v != nodo_inicial:
pilha_acoes.append(v.acao)
v = v.pai
return pilha_acoes
def busca_dfs(nodo_inicial, custo_maximo_atual):
objetivo = "12345678_"
	custo_maximo_absoluto = 100 #maximum tolerated depth
explorados = set()
fronteira = deque()
fronteira.append(nodo_inicial)
	if custo_maximo_atual > custo_maximo_absoluto: #if the current depth limit exceeds the maximum tolerated depth, return -1: a solution probably does not exist
return -1
while True:
		if not fronteira: # if the frontier is empty
explorados = None
			return busca_dfs(nodo_inicial, custo_maximo_atual + 1) #run the search again, this time with a larger depth limit
		v = fronteira.pop() #pop instead of popleft, so the frontier behaves as a stack
if v.estado == objetivo:
return busca_caminho(v, nodo_inicial)
if v not in explorados:
explorados.add(v)
estados_sucessores = sucessor.sucessor(v.estado)
			# Each state reachable from v is appended to the frontier, as long as the new states' depth does not exceed the maximum depth
if (v.custo + 1) < custo_maximo_atual:
for e in estados_sucessores:
filho = expande.Nodo(e[1], v, e[0], v.custo + 1)
fronteira.append(filho)
def main():
	#to avoid modifying the existing classes, each state's cost doubles as its depth, since new states always have cost = parent's cost + 1
estado_inicial = sys.argv[1]
custo_inicial = 0
pai = expande.Nodo(estado_inicial, 0, "", custo_inicial)
caminho = busca_dfs(pai, 1)
while caminho:
print(caminho.pop(), end = " ")
print()
if __name__ == '__main__':
main()
|
flexible
|
{
"blob_id": "a85a7ad6ffb2b9aa5f5326d11c75ddbee680fac4",
"index": 673,
"step-1": "<mask token>\n\n\ndef busca_dfs(nodo_inicial, custo_maximo_atual):\n objetivo = '12345678_'\n custo_maximo_absoluto = 100\n explorados = set()\n fronteira = deque()\n fronteira.append(nodo_inicial)\n if custo_maximo_atual > custo_maximo_absoluto:\n return -1\n while True:\n if not fronteira:\n explorados = None\n return busca_dfs(nodo_inicial, custo_maximo_atual + 1)\n v = fronteira.pop()\n if v.estado == objetivo:\n return busca_caminho(v, nodo_inicial)\n if v not in explorados:\n explorados.add(v)\n estados_sucessores = sucessor.sucessor(v.estado)\n if v.custo + 1 < custo_maximo_atual:\n for e in estados_sucessores:\n filho = expande.Nodo(e[1], v, e[0], v.custo + 1)\n fronteira.append(filho)\n\n\ndef main():\n estado_inicial = sys.argv[1]\n custo_inicial = 0\n pai = expande.Nodo(estado_inicial, 0, '', custo_inicial)\n caminho = busca_dfs(pai, 1)\n while caminho:\n print(caminho.pop(), end=' ')\n print()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef busca_caminho(nodo_final, nodo_inicial):\n pilha_acoes = deque()\n v = nodo_final\n while v != nodo_inicial:\n pilha_acoes.append(v.acao)\n v = v.pai\n return pilha_acoes\n\n\ndef busca_dfs(nodo_inicial, custo_maximo_atual):\n objetivo = '12345678_'\n custo_maximo_absoluto = 100\n explorados = set()\n fronteira = deque()\n fronteira.append(nodo_inicial)\n if custo_maximo_atual > custo_maximo_absoluto:\n return -1\n while True:\n if not fronteira:\n explorados = None\n return busca_dfs(nodo_inicial, custo_maximo_atual + 1)\n v = fronteira.pop()\n if v.estado == objetivo:\n return busca_caminho(v, nodo_inicial)\n if v not in explorados:\n explorados.add(v)\n estados_sucessores = sucessor.sucessor(v.estado)\n if v.custo + 1 < custo_maximo_atual:\n for e in estados_sucessores:\n filho = expande.Nodo(e[1], v, e[0], v.custo + 1)\n fronteira.append(filho)\n\n\ndef main():\n estado_inicial = sys.argv[1]\n custo_inicial = 0\n pai = expande.Nodo(estado_inicial, 0, '', custo_inicial)\n caminho = busca_dfs(pai, 1)\n while caminho:\n print(caminho.pop(), end=' ')\n print()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef busca_caminho(nodo_final, nodo_inicial):\n pilha_acoes = deque()\n v = nodo_final\n while v != nodo_inicial:\n pilha_acoes.append(v.acao)\n v = v.pai\n return pilha_acoes\n\n\ndef busca_dfs(nodo_inicial, custo_maximo_atual):\n objetivo = '12345678_'\n custo_maximo_absoluto = 100\n explorados = set()\n fronteira = deque()\n fronteira.append(nodo_inicial)\n if custo_maximo_atual > custo_maximo_absoluto:\n return -1\n while True:\n if not fronteira:\n explorados = None\n return busca_dfs(nodo_inicial, custo_maximo_atual + 1)\n v = fronteira.pop()\n if v.estado == objetivo:\n return busca_caminho(v, nodo_inicial)\n if v not in explorados:\n explorados.add(v)\n estados_sucessores = sucessor.sucessor(v.estado)\n if v.custo + 1 < custo_maximo_atual:\n for e in estados_sucessores:\n filho = expande.Nodo(e[1], v, e[0], v.custo + 1)\n fronteira.append(filho)\n\n\ndef main():\n estado_inicial = sys.argv[1]\n custo_inicial = 0\n pai = expande.Nodo(estado_inicial, 0, '', custo_inicial)\n caminho = busca_dfs(pai, 1)\n while caminho:\n print(caminho.pop(), end=' ')\n print()\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import sys\nimport sucessor\nimport expande\nfrom collections import deque\n\n\ndef busca_caminho(nodo_final, nodo_inicial):\n pilha_acoes = deque()\n v = nodo_final\n while v != nodo_inicial:\n pilha_acoes.append(v.acao)\n v = v.pai\n return pilha_acoes\n\n\ndef busca_dfs(nodo_inicial, custo_maximo_atual):\n objetivo = '12345678_'\n custo_maximo_absoluto = 100\n explorados = set()\n fronteira = deque()\n fronteira.append(nodo_inicial)\n if custo_maximo_atual > custo_maximo_absoluto:\n return -1\n while True:\n if not fronteira:\n explorados = None\n return busca_dfs(nodo_inicial, custo_maximo_atual + 1)\n v = fronteira.pop()\n if v.estado == objetivo:\n return busca_caminho(v, nodo_inicial)\n if v not in explorados:\n explorados.add(v)\n estados_sucessores = sucessor.sucessor(v.estado)\n if v.custo + 1 < custo_maximo_atual:\n for e in estados_sucessores:\n filho = expande.Nodo(e[1], v, e[0], v.custo + 1)\n fronteira.append(filho)\n\n\ndef main():\n estado_inicial = sys.argv[1]\n custo_inicial = 0\n pai = expande.Nodo(estado_inicial, 0, '', custo_inicial)\n caminho = busca_dfs(pai, 1)\n while caminho:\n print(caminho.pop(), end=' ')\n print()\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "import sys\nimport sucessor\nimport expande\nfrom collections import deque\n\ndef busca_caminho(nodo_final, nodo_inicial):\n\tpilha_acoes = deque() # iremos empilhar as acoes já que a estaremos com a ordem reversa a priori\n\tv = nodo_final\n\twhile v != nodo_inicial:\n\t\tpilha_acoes.append(v.acao)\n\t\tv = v.pai\n\treturn pilha_acoes\n\ndef busca_dfs(nodo_inicial, custo_maximo_atual):\n\tobjetivo = \"12345678_\"\n\tcusto_maximo_absoluto = 100 #profundedade maxima tolerada\n\texplorados = set()\n\tfronteira = deque()\n\tfronteira.append(nodo_inicial)\n\tif custo_maximo_atual > custo_maximo_absoluto: #se a profundedade maxima atual é maior do que a profundedade maxima tolerada retorna -1 pois provavelmente não existe uma solução\n\t\treturn -1\n\twhile True:\n\t\tif not fronteira: # Se a fronteira esta vazia\n\t\t\texplorados = None\n\t\t\treturn busca_dfs(nodo_inicial, custo_maximo_atual + 1) #executa a função novamente mas dessa vez com uma profundedade maxima maior\n\t\tv = fronteira.pop() #pop em vez de popleft para tratar a fronteira como pilha\n\t\tif v.estado == objetivo:\n\t\t\treturn busca_caminho(v, nodo_inicial)\n\t\tif v not in explorados:\n\t\t\texplorados.add(v)\n\t\t\testados_sucessores = sucessor.sucessor(v.estado)\n\t\t\t# Cada estado atingível a partir de v é acrescentado à fronteira caso a profundidade dos novos estados não exceda a profundidade máxima\n\t\t\tif (v.custo + 1) < custo_maximo_atual:\n\t\t\t\tfor e in estados_sucessores:\n\t\t\t\t\tfilho = expande.Nodo(e[1], v, e[0], v.custo + 1)\n\t\t\t\t\tfronteira.append(filho)\n\ndef main():\n\t#como eu não queria ter que modificar as classes que já existiam, usei o custo de cada estado como um sinônimo de profundidade, já que os novos estados sempre tem custo = custo do pai + 1\n\testado_inicial = sys.argv[1]\n\tcusto_inicial = 0\n\tpai = expande.Nodo(estado_inicial, 0, \"\", custo_inicial)\n\tcaminho = busca_dfs(pai, 1)\n\n\twhile caminho:\n\t\tprint(caminho.pop(), end = \" \")\n\tprint()\n\nif __name__ == '__main__':\n\tmain()\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
#Program to create and store Employee Salary Records in a file
import os
def appendEmployee(eno,name,basic):
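    #compute HRA, DA, gross, tax and net from the basic pay and append one comma-separated record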
fh=open("Employee.txt","a")
hra=basic*0.10
da=basic*0.73
gross=basic+hra+da
tax=gross*0.3
net=gross-tax
line=str(eno)+","+name+","+str(basic)+","+str(hra)+","+str(da)+","+str(gross)+","+str(tax)+","+str(net)+"\n"
fh.write(line)
fh.close()
def displayEmployees():
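    #print every stored record with field labels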
fh=open("Employee.txt","r")
for line in fh:
emp=line.split(",")
print("\nEmployee No:",emp[0],"\nEmployee Name:",emp[1],"\nBasic:",emp[2],"\nHRA:",emp[3],"\nDA:",emp[4],"\nGross Salary:",emp[5],"\nIncome Tax:",emp[6],"\nNet Salary:",emp[7])
fh.close()
def searchEmployee(eno):
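    #linear search by employee number; flag records whether a match was found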
fh=open("Employee.txt","r")
flag=False
for line in fh:
emp=line.split(",")
if(int(emp[0])==eno):
print("\nEmployee No:",emp[0],"\nEmployee Name:",emp[1],"\nBasic:",emp[2],"\nHRA:",emp[3],"\nDA:",emp[4],"\nGross Salary:",emp[5],"\nIncome Tax:",emp[6],"\nNet Salary:",emp[7])
flag=True
break
if flag==False:
print("Employee record not found")
fh.close()
def deleteEmployee(eno):
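    #copy every record except the matching one to a temp file, then swap it in for the original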
count=0
fh=open("Employee.txt","r")
ftemp=open("Temp.txt","w")
for line in fh:
emp=line.split(",")
if(int(emp[0])!=eno):
ftemp.write(line)
else:
count+=1
continue
fh.close()
ftemp.close()
if count==0:
print("Employee record not found")
os.remove("Temp.txt")
else:
print("No of Employee records deleted:",count)
os.remove("Employee.txt")
os.rename("Temp.txt","Employee.txt")
def modifyEmployee(eno):
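    #rewrite the file through a temp copy, replacing the matching record with freshly entered details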
count=0
fh=open("Employee.txt","r")
ftemp=open("Temp.txt","w")
for line in fh:
emp=line.split(",")
if(int(emp[0])==eno):
print("Existing Employee record:")
print("\nEmployee No:",emp[0],"\nEmployee Name:",emp[1],"\nBasic:",emp[2],"\nHRA:",emp[3],"\nDA:",emp[4],"\nGross Salary:",emp[5],"\nIncome Tax:",emp[6],"\nNet Salary:",emp[7])
print("Enter New Employee details")
emp[1]=input("Enter Employee Name:")
emp[2]=int(input("Enter Employee Basic Salary:"))
emp[3]=emp[2]*0.10 #HRA
emp[4]=emp[2]*0.73 #DA
emp[5]=emp[2]+emp[3]+emp[4] #Gross
emp[6]=emp[5]*0.3 #Tax
            emp[7]=emp[5]-emp[6] #Net
line=emp[0]+","+emp[1]+","+str(emp[2])+","+str(emp[3])+","+str(emp[4])+","+str(emp[5])+","+str(emp[6])+","+str(emp[7])+"\n"
count+=1
ftemp.write(line)
fh.close()
ftemp.close()
if count==0:
print("Employee record not found")
os.remove("Temp.txt")
else:
print("No of Employee records modified:",count)
os.remove("Employee.txt")
os.rename("Temp.txt","Employee.txt")
if __name__=="__main__":
while True:
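        #menu loop: repeat until the user chooses Exit (option 6)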
ch=int(input("1->New Employee 2->Display Employee records 3->Search Employee 4->Delete Employee 5->Modify Employee 6->Exit\n"))
if ch==1:
eno=int(input("Enter Employee No:"))
name=input("Enter Employee Name:")
basic=int(input("Enter Employee Basic salary:"))
appendEmployee(eno,name,basic)
elif ch==2:
displayEmployees()
elif ch==3:
eno=int(input("Enter Employee No to search:"))
searchEmployee(eno)
elif ch==4:
eno=int(input("Enter Employee No to delete:"))
deleteEmployee(eno)
elif ch==5:
eno=int(input("Enter Employee No to modify:"))
modifyEmployee(eno)
else:
break
|
normal
|
{
"blob_id": "5b6241907cc97f82d6c6e0a461f4f71a9a567204",
"index": 5395,
"step-1": "<mask token>\n\n\ndef displayEmployees():\n fh = open('Employee.txt', 'r')\n for line in fh:\n emp = line.split(',')\n print('\\nEmployee No:', emp[0], '\\nEmployee Name:', emp[1],\n '\\nBasic:', emp[2], '\\nHRA:', emp[3], '\\nDA:', emp[4],\n '\\nGross Salary:', emp[5], '\\nIncome Tax:', emp[6],\n '\\nNet Salary:', emp[7])\n fh.close()\n\n\n<mask token>\n\n\ndef deleteEmployee(eno):\n count = 0\n fh = open('Employee.txt', 'r')\n ftemp = open('Temp.txt', 'w')\n for line in fh:\n emp = line.split(',')\n if int(emp[0]) != eno:\n ftemp.write(line)\n else:\n count += 1\n continue\n fh.close()\n ftemp.close()\n if count == 0:\n print('Employee record not found')\n os.remove('Temp.txt')\n else:\n print('No of Employee records deleted:', count)\n os.remove('Employee.txt')\n os.rename('Temp.txt', 'Employee.txt')\n\n\ndef modifyEmployee(eno):\n count = 0\n fh = open('Employee.txt', 'r')\n ftemp = open('Temp.txt', 'w')\n for line in fh:\n emp = line.split(',')\n if int(emp[0]) == eno:\n print('Existing Employee record:')\n print('\\nEmployee No:', emp[0], '\\nEmployee Name:', emp[1],\n '\\nBasic:', emp[2], '\\nHRA:', emp[3], '\\nDA:', emp[4],\n '\\nGross Salary:', emp[5], '\\nIncome Tax:', emp[6],\n '\\nNet Salary:', emp[7])\n print('Enter New Employee details')\n emp[1] = input('Enter Employee Name:')\n emp[2] = int(input('Enter Employee Basic Salary:'))\n emp[3] = emp[2] * 0.1\n emp[4] = emp[2] * 0.73\n emp[5] = emp[2] + emp[3] + emp[4]\n emp[6] = emp[5] * 0.3\n emp[7] = emp[5] - emp[6]\n print(emp[0])\n line = emp[0] + ',' + emp[1] + ',' + str(emp[2]) + ',' + str(emp[3]\n ) + ',' + str(emp[4]) + ',' + str(emp[5]) + ',' + str(emp[6]\n ) + ',' + str(emp[7]) + '\\n'\n count += 1\n ftemp.write(line)\n fh.close()\n ftemp.close()\n if count == 0:\n print('Employee record not found')\n os.remove('Temp.txt')\n else:\n print('No of Employee records modified:', count)\n os.remove('Employee.txt')\n os.rename('Temp.txt', 'Employee.txt')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef appendEmployee(eno, name, basic):\n fh = open('Employee.txt', 'a')\n hra = basic * 0.1\n da = basic * 0.73\n gross = basic + hra + da\n tax = gross * 0.3\n net = gross - tax\n line = str(eno) + ',' + name + ',' + str(basic) + ',' + str(hra\n ) + ',' + str(da) + ',' + str(gross) + ',' + str(tax) + ',' + str(net\n ) + '\\n'\n fh.write(line)\n fh.close()\n\n\ndef displayEmployees():\n fh = open('Employee.txt', 'r')\n for line in fh:\n emp = line.split(',')\n print('\\nEmployee No:', emp[0], '\\nEmployee Name:', emp[1],\n '\\nBasic:', emp[2], '\\nHRA:', emp[3], '\\nDA:', emp[4],\n '\\nGross Salary:', emp[5], '\\nIncome Tax:', emp[6],\n '\\nNet Salary:', emp[7])\n fh.close()\n\n\ndef searchEmployee(eno):\n fh = open('Employee.txt', 'r')\n flag = False\n for line in fh:\n emp = line.split(',')\n if int(emp[0]) == eno:\n print('\\nEmployee No:', emp[0], '\\nEmployee Name:', emp[1],\n '\\nBasic:', emp[2], '\\nHRA:', emp[3], '\\nDA:', emp[4],\n '\\nGross Salary:', emp[5], '\\nIncome Tax:', emp[6],\n '\\nNet Salary:', emp[7])\n flag = True\n break\n if flag == False:\n print('Employee record not found')\n fh.close()\n\n\ndef deleteEmployee(eno):\n count = 0\n fh = open('Employee.txt', 'r')\n ftemp = open('Temp.txt', 'w')\n for line in fh:\n emp = line.split(',')\n if int(emp[0]) != eno:\n ftemp.write(line)\n else:\n count += 1\n continue\n fh.close()\n ftemp.close()\n if count == 0:\n print('Employee record not found')\n os.remove('Temp.txt')\n else:\n print('No of Employee records deleted:', count)\n os.remove('Employee.txt')\n os.rename('Temp.txt', 'Employee.txt')\n\n\ndef modifyEmployee(eno):\n count = 0\n fh = open('Employee.txt', 'r')\n ftemp = open('Temp.txt', 'w')\n for line in fh:\n emp = line.split(',')\n if int(emp[0]) == eno:\n print('Existing Employee record:')\n print('\\nEmployee No:', emp[0], '\\nEmployee Name:', emp[1],\n '\\nBasic:', emp[2], '\\nHRA:', emp[3], '\\nDA:', emp[4],\n '\\nGross Salary:', emp[5], '\\nIncome Tax:', emp[6],\n '\\nNet Salary:', emp[7])\n print('Enter New Employee details')\n emp[1] = input('Enter Employee Name:')\n emp[2] = int(input('Enter Employee Basic Salary:'))\n emp[3] = emp[2] * 0.1\n emp[4] = emp[2] * 0.73\n emp[5] = emp[2] + emp[3] + emp[4]\n emp[6] = emp[5] * 0.3\n emp[7] = emp[5] - emp[6]\n print(emp[0])\n line = emp[0] + ',' + emp[1] + ',' + str(emp[2]) + ',' + str(emp[3]\n ) + ',' + str(emp[4]) + ',' + str(emp[5]) + ',' + str(emp[6]\n ) + ',' + str(emp[7]) + '\\n'\n count += 1\n ftemp.write(line)\n fh.close()\n ftemp.close()\n if count == 0:\n print('Employee record not found')\n os.remove('Temp.txt')\n else:\n print('No of Employee records modified:', count)\n os.remove('Employee.txt')\n os.rename('Temp.txt', 'Employee.txt')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef appendEmployee(eno, name, basic):\n fh = open('Employee.txt', 'a')\n hra = basic * 0.1\n da = basic * 0.73\n gross = basic + hra + da\n tax = gross * 0.3\n net = gross - tax\n line = str(eno) + ',' + name + ',' + str(basic) + ',' + str(hra\n ) + ',' + str(da) + ',' + str(gross) + ',' + str(tax) + ',' + str(net\n ) + '\\n'\n fh.write(line)\n fh.close()\n\n\ndef displayEmployees():\n fh = open('Employee.txt', 'r')\n for line in fh:\n emp = line.split(',')\n print('\\nEmployee No:', emp[0], '\\nEmployee Name:', emp[1],\n '\\nBasic:', emp[2], '\\nHRA:', emp[3], '\\nDA:', emp[4],\n '\\nGross Salary:', emp[5], '\\nIncome Tax:', emp[6],\n '\\nNet Salary:', emp[7])\n fh.close()\n\n\ndef searchEmployee(eno):\n fh = open('Employee.txt', 'r')\n flag = False\n for line in fh:\n emp = line.split(',')\n if int(emp[0]) == eno:\n print('\\nEmployee No:', emp[0], '\\nEmployee Name:', emp[1],\n '\\nBasic:', emp[2], '\\nHRA:', emp[3], '\\nDA:', emp[4],\n '\\nGross Salary:', emp[5], '\\nIncome Tax:', emp[6],\n '\\nNet Salary:', emp[7])\n flag = True\n break\n if flag == False:\n print('Employee record not found')\n fh.close()\n\n\ndef deleteEmployee(eno):\n count = 0\n fh = open('Employee.txt', 'r')\n ftemp = open('Temp.txt', 'w')\n for line in fh:\n emp = line.split(',')\n if int(emp[0]) != eno:\n ftemp.write(line)\n else:\n count += 1\n continue\n fh.close()\n ftemp.close()\n if count == 0:\n print('Employee record not found')\n os.remove('Temp.txt')\n else:\n print('No of Employee records deleted:', count)\n os.remove('Employee.txt')\n os.rename('Temp.txt', 'Employee.txt')\n\n\ndef modifyEmployee(eno):\n count = 0\n fh = open('Employee.txt', 'r')\n ftemp = open('Temp.txt', 'w')\n for line in fh:\n emp = line.split(',')\n if int(emp[0]) == eno:\n print('Existing Employee record:')\n print('\\nEmployee No:', emp[0], '\\nEmployee Name:', emp[1],\n '\\nBasic:', emp[2], '\\nHRA:', emp[3], '\\nDA:', emp[4],\n '\\nGross Salary:', emp[5], '\\nIncome Tax:', emp[6],\n '\\nNet Salary:', emp[7])\n print('Enter New Employee details')\n emp[1] = input('Enter Employee Name:')\n emp[2] = int(input('Enter Employee Basic Salary:'))\n emp[3] = emp[2] * 0.1\n emp[4] = emp[2] * 0.73\n emp[5] = emp[2] + emp[3] + emp[4]\n emp[6] = emp[5] * 0.3\n emp[7] = emp[5] - emp[6]\n print(emp[0])\n line = emp[0] + ',' + emp[1] + ',' + str(emp[2]) + ',' + str(emp[3]\n ) + ',' + str(emp[4]) + ',' + str(emp[5]) + ',' + str(emp[6]\n ) + ',' + str(emp[7]) + '\\n'\n count += 1\n ftemp.write(line)\n fh.close()\n ftemp.close()\n if count == 0:\n print('Employee record not found')\n os.remove('Temp.txt')\n else:\n print('No of Employee records modified:', count)\n os.remove('Employee.txt')\n os.rename('Temp.txt', 'Employee.txt')\n\n\nif __name__ == '__main__':\n while True:\n ch = int(input(\n \"\"\"1->New Employee 2->Display Employee records 3->Search Employee 4->Delete Employee 5->Modify Employee 6->Exit\n\"\"\"\n ))\n if ch == 1:\n eno = int(input('Enter Employee No:'))\n name = input('Enter Employee Name:')\n basic = int(input('Enter Employee Basic salary:'))\n appendEmployee(eno, name, basic)\n elif ch == 2:\n displayEmployees()\n elif ch == 3:\n eno = int(input('Enter Employee No to search:'))\n searchEmployee(eno)\n elif ch == 4:\n eno = int(input('Enter Employee No to delete:'))\n deleteEmployee(eno)\n elif ch == 5:\n eno = int(input('Enter Employee No to modify:'))\n modifyEmployee(eno)\n else:\n break\n",
"step-4": "import os\n\n\ndef appendEmployee(eno, name, basic):\n fh = open('Employee.txt', 'a')\n hra = basic * 0.1\n da = basic * 0.73\n gross = basic + hra + da\n tax = gross * 0.3\n net = gross - tax\n line = str(eno) + ',' + name + ',' + str(basic) + ',' + str(hra\n ) + ',' + str(da) + ',' + str(gross) + ',' + str(tax) + ',' + str(net\n ) + '\\n'\n fh.write(line)\n fh.close()\n\n\ndef displayEmployees():\n fh = open('Employee.txt', 'r')\n for line in fh:\n emp = line.split(',')\n print('\\nEmployee No:', emp[0], '\\nEmployee Name:', emp[1],\n '\\nBasic:', emp[2], '\\nHRA:', emp[3], '\\nDA:', emp[4],\n '\\nGross Salary:', emp[5], '\\nIncome Tax:', emp[6],\n '\\nNet Salary:', emp[7])\n fh.close()\n\n\ndef searchEmployee(eno):\n fh = open('Employee.txt', 'r')\n flag = False\n for line in fh:\n emp = line.split(',')\n if int(emp[0]) == eno:\n print('\\nEmployee No:', emp[0], '\\nEmployee Name:', emp[1],\n '\\nBasic:', emp[2], '\\nHRA:', emp[3], '\\nDA:', emp[4],\n '\\nGross Salary:', emp[5], '\\nIncome Tax:', emp[6],\n '\\nNet Salary:', emp[7])\n flag = True\n break\n if flag == False:\n print('Employee record not found')\n fh.close()\n\n\ndef deleteEmployee(eno):\n count = 0\n fh = open('Employee.txt', 'r')\n ftemp = open('Temp.txt', 'w')\n for line in fh:\n emp = line.split(',')\n if int(emp[0]) != eno:\n ftemp.write(line)\n else:\n count += 1\n continue\n fh.close()\n ftemp.close()\n if count == 0:\n print('Employee record not found')\n os.remove('Temp.txt')\n else:\n print('No of Employee records deleted:', count)\n os.remove('Employee.txt')\n os.rename('Temp.txt', 'Employee.txt')\n\n\ndef modifyEmployee(eno):\n count = 0\n fh = open('Employee.txt', 'r')\n ftemp = open('Temp.txt', 'w')\n for line in fh:\n emp = line.split(',')\n if int(emp[0]) == eno:\n print('Existing Employee record:')\n print('\\nEmployee No:', emp[0], '\\nEmployee Name:', emp[1],\n '\\nBasic:', emp[2], '\\nHRA:', emp[3], '\\nDA:', emp[4],\n '\\nGross Salary:', emp[5], '\\nIncome Tax:', emp[6],\n '\\nNet Salary:', emp[7])\n print('Enter New Employee details')\n emp[1] = input('Enter Employee Name:')\n emp[2] = int(input('Enter Employee Basic Salary:'))\n emp[3] = emp[2] * 0.1\n emp[4] = emp[2] * 0.73\n emp[5] = emp[2] + emp[3] + emp[4]\n emp[6] = emp[5] * 0.3\n emp[7] = emp[5] - emp[6]\n print(emp[0])\n line = emp[0] + ',' + emp[1] + ',' + str(emp[2]) + ',' + str(emp[3]\n ) + ',' + str(emp[4]) + ',' + str(emp[5]) + ',' + str(emp[6]\n ) + ',' + str(emp[7]) + '\\n'\n count += 1\n ftemp.write(line)\n fh.close()\n ftemp.close()\n if count == 0:\n print('Employee record not found')\n os.remove('Temp.txt')\n else:\n print('No of Employee records modified:', count)\n os.remove('Employee.txt')\n os.rename('Temp.txt', 'Employee.txt')\n\n\nif __name__ == '__main__':\n while True:\n ch = int(input(\n \"\"\"1->New Employee 2->Display Employee records 3->Search Employee 4->Delete Employee 5->Modify Employee 6->Exit\n\"\"\"\n ))\n if ch == 1:\n eno = int(input('Enter Employee No:'))\n name = input('Enter Employee Name:')\n basic = int(input('Enter Employee Basic salary:'))\n appendEmployee(eno, name, basic)\n elif ch == 2:\n displayEmployees()\n elif ch == 3:\n eno = int(input('Enter Employee No to search:'))\n searchEmployee(eno)\n elif ch == 4:\n eno = int(input('Enter Employee No to delete:'))\n deleteEmployee(eno)\n elif ch == 5:\n eno = int(input('Enter Employee No to modify:'))\n modifyEmployee(eno)\n else:\n break\n",
"step-5": "#Program to create and store Employee Salary Records in a file\n\nimport os\n\ndef appendEmployee(eno,name,basic):\n\tfh=open(\"Employee.txt\",\"a\")\n\thra=basic*0.10\n\tda=basic*0.73\n\tgross=basic+hra+da\n\ttax=gross*0.3\n\tnet=gross-tax\n\tline=str(eno)+\",\"+name+\",\"+str(basic)+\",\"+str(hra)+\",\"+str(da)+\",\"+str(gross)+\",\"+str(tax)+\",\"+str(net)+\"\\n\"\n\tfh.write(line)\n\tfh.close()\n\n\n\ndef displayEmployees():\n\tfh=open(\"Employee.txt\",\"r\")\n\tfor line in fh:\n\t\temp=line.split(\",\")\n\t\tprint(\"\\nEmployee No:\",emp[0],\"\\nEmployee Name:\",emp[1],\"\\nBasic:\",emp[2],\"\\nHRA:\",emp[3],\"\\nDA:\",emp[4],\"\\nGross Salary:\",emp[5],\"\\nIncome Tax:\",emp[6],\"\\nNet Salary:\",emp[7])\n\tfh.close()\n\n\n\ndef searchEmployee(eno):\n\tfh=open(\"Employee.txt\",\"r\")\n\tflag=False\n\tfor line in fh:\n\t\temp=line.split(\",\")\n\t\tif(int(emp[0])==eno):\n\t\t\tprint(\"\\nEmployee No:\",emp[0],\"\\nEmployee Name:\",emp[1],\"\\nBasic:\",emp[2],\"\\nHRA:\",emp[3],\"\\nDA:\",emp[4],\"\\nGross Salary:\",emp[5],\"\\nIncome Tax:\",emp[6],\"\\nNet Salary:\",emp[7])\n\t\t\tflag=True\n\t\t\tbreak\n\tif flag==False:\n\t\tprint(\"Employee record not found\")\n\tfh.close()\n\n\n\ndef deleteEmployee(eno):\n\tcount=0\n\tfh=open(\"Employee.txt\",\"r\")\n\tftemp=open(\"Temp.txt\",\"w\")\n\tfor line in fh:\n\t\temp=line.split(\",\")\n\t\tif(int(emp[0])!=eno):\n\t\t\tftemp.write(line)\n\t\telse:\n\t\t\tcount+=1\n\t\t\tcontinue\n\tfh.close()\n\tftemp.close()\n\n\tif count==0:\n\t\tprint(\"Employee record not found\")\n\t\tos.remove(\"Temp.txt\")\n\telse:\n\t\tprint(\"No of Employee records deleted:\",count)\n\t\tos.remove(\"Employee.txt\")\n\t\tos.rename(\"Temp.txt\",\"Employee.txt\")\n\n\n\n\ndef modifyEmployee(eno):\n\tcount=0\n\tfh=open(\"Employee.txt\",\"r\")\n\tftemp=open(\"Temp.txt\",\"w\")\n\tfor line in fh:\n\t\temp=line.split(\",\")\n\t\tif(int(emp[0])==eno):\n\t\t\tprint(\"Existing Employee record:\")\n\t\t\tprint(\"\\nEmployee No:\",emp[0],\"\\nEmployee Name:\",emp[1],\"\\nBasic:\",emp[2],\"\\nHRA:\",emp[3],\"\\nDA:\",emp[4],\"\\nGross Salary:\",emp[5],\"\\nIncome Tax:\",emp[6],\"\\nNet Salary:\",emp[7])\n\t\t\tprint(\"Enter New Employee details\")\n\t\t\temp[1]=input(\"Enter Employee Name:\")\n\t\t\temp[2]=int(input(\"Enter Employee Basic Salary:\"))\n\t\t\temp[3]=emp[2]*0.10\t\t\t\t\t\t\t\t\t\t#HRA\n\t\t\temp[4]=emp[2]*0.73\t\t\t\t\t\t\t\t\t\t#DA\n\t\t\temp[5]=emp[2]+emp[3]+emp[4]\t\t\t\t\t\t\t\t#Gross\n\t\t\temp[6]=emp[5]*0.3\t\t\t\t\t\t\t\t\t\t#Tax\n\t\t\temp[7]=emp[5]-emp[6]\n\t\t\tprint(emp[0])\t\t\t\t\t\t\t\t\t#Net\n\t\t\tline=emp[0]+\",\"+emp[1]+\",\"+str(emp[2])+\",\"+str(emp[3])+\",\"+str(emp[4])+\",\"+str(emp[5])+\",\"+str(emp[6])+\",\"+str(emp[7])+\"\\n\"\n\t\t\tcount+=1\n\t\tftemp.write(line)\n\n\tfh.close()\n\tftemp.close()\n\n\tif count==0:\n\t\tprint(\"Employee record not found\")\n\t\tos.remove(\"Temp.txt\")\n\telse:\n\t\tprint(\"No of Employee records modified:\",count)\n\t\tos.remove(\"Employee.txt\")\n\t\tos.rename(\"Temp.txt\",\"Employee.txt\")\n\n\n\nif __name__==\"__main__\":\n\twhile True:\n\t\tch=int(input(\"1->New Employee 2->Display Employee records 3->Search Employee 4->Delete Employee 5->Modify Employee 6->Exit\\n\"))\n\t\tif ch==1:\n\t\t\teno=int(input(\"Enter Employee No:\"))\n\t\t\tname=input(\"Enter Employee Name:\")\n\t\t\tbasic=int(input(\"Enter Employee Basic salary:\"))\n\t\t\tappendEmployee(eno,name,basic)\n\n\t\telif ch==2:\n\t\t\tdisplayEmployees()\n\n\t\telif ch==3:\n\t\t\teno=int(input(\"Enter Employee No to 
search:\"))\n\t\t\tsearchEmployee(eno)\n\n\t\telif ch==4:\n\t\t\teno=int(input(\"Enter Employee No to delete:\"))\n\t\t\tdeleteEmployee(eno)\n\n\t\telif ch==5:\n\t\t\teno=int(input(\"Enter Employee No to modify:\"))\n\t\t\tmodifyEmployee(eno)\n\n\t\telse:\n\t\t\tbreak\n\n",
"step-ids": [
3,
5,
6,
7,
8
]
}
|
[
3,
5,
6,
7,
8
] |
from nltk.tokenize import sent_tokenize, word_tokenize
from nltk.corpus import stopwords
from nltk.stem import PorterStemmer
from nltk.classify import NaiveBayesClassifier
from nltk.probability import FreqDist
import csv
f = open('trolls.csv', 'r')
file = csv.reader(f)
sentences=[]
remarks=[]
psObject = PorterStemmer()
illegal_chars = [
'.',',','@',"'",'+','-','*',
]
paragraph=''
for kk in file :
paragraph+=kk[0]
f.close()
f = open('trolls.csv', 'r')
file = csv.reader(f)
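# the 100 most frequent tokens across all comments form the feature vocabulary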
all_words = word_tokenize(paragraph)
# print(all_words)
all2 = FreqDist(all_words)
most_common_words = list(all2.most_common(100))
print('most commons below...')
print(most_common_words)
most_cm_1=[]
for i,j in most_common_words:
most_cm_1.append(i)
# print(most_cm_1)
stopWords = stopwords.words('english')
all_words = []
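# for each labeled comment: strip punctuation, drop English stopwords, then mark which vocabulary words appear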
for i in file :
filtered=''
filtered_from_stopWords=''
counter = 0
for j in range(len(illegal_chars)) :
if counter == 0:
counter+=1
filtered = i[0].replace(illegal_chars[j], '')
else :
filtered=filtered.replace(illegal_chars[j],'')
counter=0
filteredArr = filtered.split(' ')
for x in filteredArr :
if x not in stopWords :
filtered_from_stopWords+=x+' '
bb=[]
filtered_from_stopWords_ARRAY=filtered_from_stopWords.split(' ')
features = {w.lower(): (w in most_cm_1) for w in filtered_from_stopWords_ARRAY}
bb.append(features)
bb.append(i[1])
sentences.append(bb)
remarks.append(i[1])
count =0
print(remarks)
print(sentences)
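# train a Naive Bayes classifier on the (feature dict, label) pairs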
classifier = NaiveBayesClassifier.train(sentences)
inputs = input('Enter a comment ')
words_entered=inputs.split(' ')
# build the feature dict the same way as during training: lowercase keys, membership in the common-word vocabulary
entry = {w.lower(): (w in most_cm_1) for w in words_entered}
print(classifier.classify(entry))
|
normal
|
{
"blob_id": "0dbdd7f7adffed850f126a2054c764b421c6ab84",
"index": 6799,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor kk in file:\n paragraph += kk[0]\nf.close()\n<mask token>\nprint('most commons below...')\nprint(most_common_words)\n<mask token>\nfor i, j in most_common_words:\n most_cm_1.append(i)\n<mask token>\nfor i in file:\n filtered = ''\n filtered_from_stopWords = ''\n counter = 0\n for j in range(len(illegal_chars)):\n if counter == 0:\n counter += 1\n filtered = i[0].replace(illegal_chars[j], '')\n else:\n filtered = filtered.replace(illegal_chars[j], '')\n counter = 0\n filteredArr = filtered.split(' ')\n for x in filteredArr:\n if x not in stopWords:\n filtered_from_stopWords += x + ' '\n bb = []\n filtered_from_stopWords_ARRAY = filtered_from_stopWords.split(' ')\n features = {w.lower(): (w in most_cm_1) for w in\n filtered_from_stopWords_ARRAY}\n bb.append(features)\n bb.append(i[1])\n sentences.append(bb)\n remarks.append(i[1])\n<mask token>\nprint(remarks)\nprint(sentences)\n<mask token>\nprint(classifier.classify(entry))\n",
"step-3": "<mask token>\nf = open('trolls.csv', 'r')\nfile = csv.reader(f)\nsentences = []\nremarks = []\npsObject = PorterStemmer()\nillegal_chars = ['.', ',', '@', \"'\", '+', '-', '*']\nparagraph = ''\nfor kk in file:\n paragraph += kk[0]\nf.close()\nf = open('trolls.csv', 'r')\nfile = csv.reader(f)\nall_words = word_tokenize(paragraph)\nall2 = FreqDist(all_words)\nmost_common_words = list(all2.most_common(100))\nprint('most commons below...')\nprint(most_common_words)\nmost_cm_1 = []\nfor i, j in most_common_words:\n most_cm_1.append(i)\nstopWords = stopwords.words('english')\nall_words = []\nfor i in file:\n filtered = ''\n filtered_from_stopWords = ''\n counter = 0\n for j in range(len(illegal_chars)):\n if counter == 0:\n counter += 1\n filtered = i[0].replace(illegal_chars[j], '')\n else:\n filtered = filtered.replace(illegal_chars[j], '')\n counter = 0\n filteredArr = filtered.split(' ')\n for x in filteredArr:\n if x not in stopWords:\n filtered_from_stopWords += x + ' '\n bb = []\n filtered_from_stopWords_ARRAY = filtered_from_stopWords.split(' ')\n features = {w.lower(): (w in most_cm_1) for w in\n filtered_from_stopWords_ARRAY}\n bb.append(features)\n bb.append(i[1])\n sentences.append(bb)\n remarks.append(i[1])\ncount = 0\nprint(remarks)\nprint(sentences)\nclassifier = NaiveBayesClassifier.train(sentences)\ninputs = input('Enter a comment ')\nwords_entered = inputs.split(' ')\nentry = {w: (True) for w in words_entered}\nprint(classifier.classify(entry))\n",
"step-4": "from nltk.tokenize import sent_tokenize, word_tokenize\nfrom nltk.corpus import stopwords\nfrom nltk.stem import PorterStemmer\nfrom nltk.classify import NaiveBayesClassifier\nfrom nltk.probability import FreqDist\nimport csv\nf = open('trolls.csv', 'r')\nfile = csv.reader(f)\nsentences = []\nremarks = []\npsObject = PorterStemmer()\nillegal_chars = ['.', ',', '@', \"'\", '+', '-', '*']\nparagraph = ''\nfor kk in file:\n paragraph += kk[0]\nf.close()\nf = open('trolls.csv', 'r')\nfile = csv.reader(f)\nall_words = word_tokenize(paragraph)\nall2 = FreqDist(all_words)\nmost_common_words = list(all2.most_common(100))\nprint('most commons below...')\nprint(most_common_words)\nmost_cm_1 = []\nfor i, j in most_common_words:\n most_cm_1.append(i)\nstopWords = stopwords.words('english')\nall_words = []\nfor i in file:\n filtered = ''\n filtered_from_stopWords = ''\n counter = 0\n for j in range(len(illegal_chars)):\n if counter == 0:\n counter += 1\n filtered = i[0].replace(illegal_chars[j], '')\n else:\n filtered = filtered.replace(illegal_chars[j], '')\n counter = 0\n filteredArr = filtered.split(' ')\n for x in filteredArr:\n if x not in stopWords:\n filtered_from_stopWords += x + ' '\n bb = []\n filtered_from_stopWords_ARRAY = filtered_from_stopWords.split(' ')\n features = {w.lower(): (w in most_cm_1) for w in\n filtered_from_stopWords_ARRAY}\n bb.append(features)\n bb.append(i[1])\n sentences.append(bb)\n remarks.append(i[1])\ncount = 0\nprint(remarks)\nprint(sentences)\nclassifier = NaiveBayesClassifier.train(sentences)\ninputs = input('Enter a comment ')\nwords_entered = inputs.split(' ')\nentry = {w: (True) for w in words_entered}\nprint(classifier.classify(entry))\n",
"step-5": "from nltk.tokenize import sent_tokenize, word_tokenize\nfrom nltk.corpus import stopwords\nfrom nltk.stem import PorterStemmer\nfrom nltk.classify import NaiveBayesClassifier\nfrom nltk.probability import FreqDist\nimport csv\n\nf = open('trolls.csv', 'r')\nfile = csv.reader(f)\nsentences=[]\nremarks=[]\npsObject = PorterStemmer()\n\nillegal_chars = [\n '.',',','@',\"'\",'+','-','*',\n]\nparagraph=''\n\nfor kk in file :\n paragraph+=kk[0]\nf.close()\nf = open('trolls.csv', 'r')\nfile = csv.reader(f)\nall_words = word_tokenize(paragraph)\n# print(all_words)\nall2 = FreqDist(all_words)\nmost_common_words = list(all2.most_common(100))\nprint('most commons below...')\nprint(most_common_words)\nmost_cm_1=[]\nfor i,j in most_common_words:\n most_cm_1.append(i)\n# print(most_cm_1)\nstopWords = stopwords.words('english')\nall_words = []\nfor i in file :\n filtered=''\n filtered_from_stopWords=''\n counter = 0\n for j in range(len(illegal_chars)) :\n if counter == 0:\n counter+=1\n filtered = i[0].replace(illegal_chars[j], '')\n else :\n filtered=filtered.replace(illegal_chars[j],'')\n counter=0\n filteredArr = filtered.split(' ')\n for x in filteredArr :\n if x not in stopWords :\n filtered_from_stopWords+=x+' '\n bb=[]\n filtered_from_stopWords_ARRAY=filtered_from_stopWords.split(' ')\n features = {w.lower(): (w in most_cm_1) for w in filtered_from_stopWords_ARRAY}\n bb.append(features)\n bb.append(i[1])\n sentences.append(bb)\n remarks.append(i[1])\n\ncount =0\nprint(remarks)\nprint(sentences)\nclassifier = NaiveBayesClassifier.train(sentences)\ninputs = input('Enter a comment ')\nwords_entered=inputs.split(' ')\nentry = {w: ( True) for w in words_entered}\n\nprint(classifier.classify(entry))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 4 12:47:30 2019
Title: MP4-Medical Image Processing
@author: MP4 Team
"""
# Validate window controller
class ValidateWindowCtr(object):
# Initialization
def __init__(self, fig, im_trans, im_truth, im_segmen, vol_trans, vol_truth, vol_segmen, ax_trans, ax_truth, ax_segmen, index_trans, index_truth, index_segmen):
self.fig = fig
self.im_trans, self.im_truth, self.im_segmen = im_trans, im_truth, im_segmen
self.vol_trans, self.vol_truth, self.vol_segmen = vol_trans, vol_truth, vol_segmen
self.ax_trans, self.ax_truth, self.ax_segmen = ax_trans, ax_truth, ax_segmen
self.index_trans, self.index_truth, self.index_segmen = index_trans, index_truth, index_segmen
self.txt_trans = self.ax_trans.text(0, 600, 'Slice No: '+str(self.index_trans[-1]), color='b')
self.txt_truth = self.ax_truth.text(0, 10, 'Slice No: '+str(self.index_truth[-1]), color='b')
self.txt_segmen = self.ax_segmen.text(0, 600, 'Slice No: '+str(self.index_segmen[-1]), color='b')
self.scroll_trans = None
self.scroll_truth = None
self.scroll_segmen = None
self.fig.canvas.mpl_connect('axes_enter_event', self.fig_enter_event)
self.fig.canvas.mpl_connect('axes_leave_event', self.fig_leave_event)
# Enable scrolling image
def fig_enter_event(self, event):
if self.ax_trans.in_axes(event):
self.scroll_trans = self.fig.canvas.mpl_connect('scroll_event', self.trans_subplot_scroll)
elif self.ax_truth.in_axes(event):
self.scroll_truth = self.fig.canvas.mpl_connect('scroll_event', self.truth_subplot_scroll)
elif self.ax_segmen.in_axes(event):
self.scroll_segmen = self.fig.canvas.mpl_connect('scroll_event', self.segmen_subplot_scroll)
# Disable scrolling image
def fig_leave_event(self, event):
self.fig.canvas.mpl_disconnect(self.scroll_trans)
self.fig.canvas.mpl_disconnect(self.scroll_truth)
self.fig.canvas.mpl_disconnect(self.scroll_segmen)
# Scroll voxel image
def trans_subplot_scroll(self, event):
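        # the index is allowed to go negative; negative indexing then wraps to the other end of the volume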
if event.button == 'down' and (self.index_trans[-1] > -1*self.vol_trans.shape[0]):
self.index_trans[-1] -= 1
if event.button == 'up' and (self.index_trans[-1] < self.vol_trans.shape[0]-1):
self.index_trans[-1] += 1
self.im_trans.set_data(self.vol_trans[self.index_trans[-1]])
self.txt_trans.set_text('Slice No: '+str(self.index_trans[-1]))
self.fig.canvas.draw_idle()
# Scroll ground truth image
def truth_subplot_scroll(self, event):
if event.button == 'down' and (self.index_truth[-1] > -1*self.vol_truth.shape[0]):
self.index_truth[-1] -= 1
if event.button == 'up' and (self.index_truth[-1] < self.vol_truth.shape[0]-1):
self.index_truth[-1] += 1
self.im_truth.set_data(self.vol_truth[self.index_truth[-1]])
self.txt_truth.set_text('Slice No: '+str(self.index_truth[-1]))
self.fig.canvas.draw_idle()
# Scroll segmented image
def segmen_subplot_scroll(self, event):
if event.button == 'down' and (self.index_segmen[-1] > -1*self.vol_segmen.shape[0]):
self.index_segmen[-1] -= 1
if event.button == 'up' and (self.index_segmen[-1] < self.vol_segmen.shape[0]-1):
self.index_segmen[-1] += 1
self.im_segmen.set_data(self.vol_segmen[self.index_segmen[-1]])
self.txt_segmen.set_text('Slice No: '+str(self.index_segmen[-1]))
self.fig.canvas.draw_idle()
|
normal
|
{
"blob_id": "e0b28fdcbc3160bcccbb032949317a91a32eeb1b",
"index": 5394,
"step-1": "<mask token>\n\n\nclass ValidateWindowCtr(object):\n\n def __init__(self, fig, im_trans, im_truth, im_segmen, vol_trans,\n vol_truth, vol_segmen, ax_trans, ax_truth, ax_segmen, index_trans,\n index_truth, index_segmen):\n self.fig = fig\n self.im_trans, self.im_truth, self.im_segmen = (im_trans, im_truth,\n im_segmen)\n self.vol_trans, self.vol_truth, self.vol_segmen = (vol_trans,\n vol_truth, vol_segmen)\n self.ax_trans, self.ax_truth, self.ax_segmen = (ax_trans, ax_truth,\n ax_segmen)\n self.index_trans, self.index_truth, self.index_segmen = (\n index_trans, index_truth, index_segmen)\n self.txt_trans = self.ax_trans.text(0, 600, 'Slice No: ' + str(self\n .index_trans[-1]), color='b')\n self.txt_truth = self.ax_truth.text(0, 10, 'Slice No: ' + str(self.\n index_truth[-1]), color='b')\n self.txt_segmen = self.ax_segmen.text(0, 600, 'Slice No: ' + str(\n self.index_segmen[-1]), color='b')\n self.scroll_trans = None\n self.scroll_truth = None\n self.scroll_segmen = None\n self.fig.canvas.mpl_connect('axes_enter_event', self.fig_enter_event)\n self.fig.canvas.mpl_connect('axes_leave_event', self.fig_leave_event)\n <mask token>\n\n def fig_leave_event(self, event):\n self.fig.canvas.mpl_disconnect(self.scroll_trans)\n self.fig.canvas.mpl_disconnect(self.scroll_truth)\n self.fig.canvas.mpl_disconnect(self.scroll_segmen)\n <mask token>\n <mask token>\n\n def segmen_subplot_scroll(self, event):\n if event.button == 'down' and self.index_segmen[-1\n ] > -1 * self.vol_segmen.shape[0]:\n self.index_segmen[-1] -= 1\n if event.button == 'up' and self.index_segmen[-1\n ] < self.vol_segmen.shape[0] - 1:\n self.index_segmen[-1] += 1\n self.im_segmen.set_data(self.vol_segmen[self.index_segmen[-1]])\n self.txt_segmen.set_text('Slice No: ' + str(self.index_segmen[-1]))\n self.fig.canvas.draw_idle()\n",
"step-2": "<mask token>\n\n\nclass ValidateWindowCtr(object):\n\n def __init__(self, fig, im_trans, im_truth, im_segmen, vol_trans,\n vol_truth, vol_segmen, ax_trans, ax_truth, ax_segmen, index_trans,\n index_truth, index_segmen):\n self.fig = fig\n self.im_trans, self.im_truth, self.im_segmen = (im_trans, im_truth,\n im_segmen)\n self.vol_trans, self.vol_truth, self.vol_segmen = (vol_trans,\n vol_truth, vol_segmen)\n self.ax_trans, self.ax_truth, self.ax_segmen = (ax_trans, ax_truth,\n ax_segmen)\n self.index_trans, self.index_truth, self.index_segmen = (\n index_trans, index_truth, index_segmen)\n self.txt_trans = self.ax_trans.text(0, 600, 'Slice No: ' + str(self\n .index_trans[-1]), color='b')\n self.txt_truth = self.ax_truth.text(0, 10, 'Slice No: ' + str(self.\n index_truth[-1]), color='b')\n self.txt_segmen = self.ax_segmen.text(0, 600, 'Slice No: ' + str(\n self.index_segmen[-1]), color='b')\n self.scroll_trans = None\n self.scroll_truth = None\n self.scroll_segmen = None\n self.fig.canvas.mpl_connect('axes_enter_event', self.fig_enter_event)\n self.fig.canvas.mpl_connect('axes_leave_event', self.fig_leave_event)\n <mask token>\n\n def fig_leave_event(self, event):\n self.fig.canvas.mpl_disconnect(self.scroll_trans)\n self.fig.canvas.mpl_disconnect(self.scroll_truth)\n self.fig.canvas.mpl_disconnect(self.scroll_segmen)\n <mask token>\n\n def truth_subplot_scroll(self, event):\n if event.button == 'down' and self.index_truth[-1\n ] > -1 * self.vol_truth.shape[0]:\n self.index_truth[-1] -= 1\n if event.button == 'up' and self.index_truth[-1\n ] < self.vol_truth.shape[0] - 1:\n self.index_truth[-1] += 1\n self.im_truth.set_data(self.vol_truth[self.index_truth[-1]])\n self.txt_truth.set_text('Slice No: ' + str(self.index_truth[-1]))\n self.fig.canvas.draw_idle()\n\n def segmen_subplot_scroll(self, event):\n if event.button == 'down' and self.index_segmen[-1\n ] > -1 * self.vol_segmen.shape[0]:\n self.index_segmen[-1] -= 1\n if event.button == 'up' and self.index_segmen[-1\n ] < self.vol_segmen.shape[0] - 1:\n self.index_segmen[-1] += 1\n self.im_segmen.set_data(self.vol_segmen[self.index_segmen[-1]])\n self.txt_segmen.set_text('Slice No: ' + str(self.index_segmen[-1]))\n self.fig.canvas.draw_idle()\n",
"step-3": "<mask token>\n\n\nclass ValidateWindowCtr(object):\n\n def __init__(self, fig, im_trans, im_truth, im_segmen, vol_trans,\n vol_truth, vol_segmen, ax_trans, ax_truth, ax_segmen, index_trans,\n index_truth, index_segmen):\n self.fig = fig\n self.im_trans, self.im_truth, self.im_segmen = (im_trans, im_truth,\n im_segmen)\n self.vol_trans, self.vol_truth, self.vol_segmen = (vol_trans,\n vol_truth, vol_segmen)\n self.ax_trans, self.ax_truth, self.ax_segmen = (ax_trans, ax_truth,\n ax_segmen)\n self.index_trans, self.index_truth, self.index_segmen = (\n index_trans, index_truth, index_segmen)\n self.txt_trans = self.ax_trans.text(0, 600, 'Slice No: ' + str(self\n .index_trans[-1]), color='b')\n self.txt_truth = self.ax_truth.text(0, 10, 'Slice No: ' + str(self.\n index_truth[-1]), color='b')\n self.txt_segmen = self.ax_segmen.text(0, 600, 'Slice No: ' + str(\n self.index_segmen[-1]), color='b')\n self.scroll_trans = None\n self.scroll_truth = None\n self.scroll_segmen = None\n self.fig.canvas.mpl_connect('axes_enter_event', self.fig_enter_event)\n self.fig.canvas.mpl_connect('axes_leave_event', self.fig_leave_event)\n\n def fig_enter_event(self, event):\n if self.ax_trans.in_axes(event):\n self.scroll_trans = self.fig.canvas.mpl_connect('scroll_event',\n self.trans_subplot_scroll)\n elif self.ax_truth.in_axes(event):\n self.scroll_truth = self.fig.canvas.mpl_connect('scroll_event',\n self.truth_subplot_scroll)\n elif self.ax_segmen.in_axes(event):\n self.scroll_segmen = self.fig.canvas.mpl_connect('scroll_event',\n self.segmen_subplot_scroll)\n\n def fig_leave_event(self, event):\n self.fig.canvas.mpl_disconnect(self.scroll_trans)\n self.fig.canvas.mpl_disconnect(self.scroll_truth)\n self.fig.canvas.mpl_disconnect(self.scroll_segmen)\n <mask token>\n\n def truth_subplot_scroll(self, event):\n if event.button == 'down' and self.index_truth[-1\n ] > -1 * self.vol_truth.shape[0]:\n self.index_truth[-1] -= 1\n if event.button == 'up' and self.index_truth[-1\n ] < self.vol_truth.shape[0] - 1:\n self.index_truth[-1] += 1\n self.im_truth.set_data(self.vol_truth[self.index_truth[-1]])\n self.txt_truth.set_text('Slice No: ' + str(self.index_truth[-1]))\n self.fig.canvas.draw_idle()\n\n def segmen_subplot_scroll(self, event):\n if event.button == 'down' and self.index_segmen[-1\n ] > -1 * self.vol_segmen.shape[0]:\n self.index_segmen[-1] -= 1\n if event.button == 'up' and self.index_segmen[-1\n ] < self.vol_segmen.shape[0] - 1:\n self.index_segmen[-1] += 1\n self.im_segmen.set_data(self.vol_segmen[self.index_segmen[-1]])\n self.txt_segmen.set_text('Slice No: ' + str(self.index_segmen[-1]))\n self.fig.canvas.draw_idle()\n",
"step-4": "<mask token>\n\n\nclass ValidateWindowCtr(object):\n\n def __init__(self, fig, im_trans, im_truth, im_segmen, vol_trans,\n vol_truth, vol_segmen, ax_trans, ax_truth, ax_segmen, index_trans,\n index_truth, index_segmen):\n self.fig = fig\n self.im_trans, self.im_truth, self.im_segmen = (im_trans, im_truth,\n im_segmen)\n self.vol_trans, self.vol_truth, self.vol_segmen = (vol_trans,\n vol_truth, vol_segmen)\n self.ax_trans, self.ax_truth, self.ax_segmen = (ax_trans, ax_truth,\n ax_segmen)\n self.index_trans, self.index_truth, self.index_segmen = (\n index_trans, index_truth, index_segmen)\n self.txt_trans = self.ax_trans.text(0, 600, 'Slice No: ' + str(self\n .index_trans[-1]), color='b')\n self.txt_truth = self.ax_truth.text(0, 10, 'Slice No: ' + str(self.\n index_truth[-1]), color='b')\n self.txt_segmen = self.ax_segmen.text(0, 600, 'Slice No: ' + str(\n self.index_segmen[-1]), color='b')\n self.scroll_trans = None\n self.scroll_truth = None\n self.scroll_segmen = None\n self.fig.canvas.mpl_connect('axes_enter_event', self.fig_enter_event)\n self.fig.canvas.mpl_connect('axes_leave_event', self.fig_leave_event)\n\n def fig_enter_event(self, event):\n if self.ax_trans.in_axes(event):\n self.scroll_trans = self.fig.canvas.mpl_connect('scroll_event',\n self.trans_subplot_scroll)\n elif self.ax_truth.in_axes(event):\n self.scroll_truth = self.fig.canvas.mpl_connect('scroll_event',\n self.truth_subplot_scroll)\n elif self.ax_segmen.in_axes(event):\n self.scroll_segmen = self.fig.canvas.mpl_connect('scroll_event',\n self.segmen_subplot_scroll)\n\n def fig_leave_event(self, event):\n self.fig.canvas.mpl_disconnect(self.scroll_trans)\n self.fig.canvas.mpl_disconnect(self.scroll_truth)\n self.fig.canvas.mpl_disconnect(self.scroll_segmen)\n\n def trans_subplot_scroll(self, event):\n if event.button == 'down' and self.index_trans[-1\n ] > -1 * self.vol_trans.shape[0]:\n self.index_trans[-1] -= 1\n if event.button == 'up' and self.index_trans[-1\n ] < self.vol_trans.shape[0] - 1:\n self.index_trans[-1] += 1\n self.im_trans.set_data(self.vol_trans[self.index_trans[-1]])\n self.txt_trans.set_text('Slice No: ' + str(self.index_trans[-1]))\n self.fig.canvas.draw_idle()\n\n def truth_subplot_scroll(self, event):\n if event.button == 'down' and self.index_truth[-1\n ] > -1 * self.vol_truth.shape[0]:\n self.index_truth[-1] -= 1\n if event.button == 'up' and self.index_truth[-1\n ] < self.vol_truth.shape[0] - 1:\n self.index_truth[-1] += 1\n self.im_truth.set_data(self.vol_truth[self.index_truth[-1]])\n self.txt_truth.set_text('Slice No: ' + str(self.index_truth[-1]))\n self.fig.canvas.draw_idle()\n\n def segmen_subplot_scroll(self, event):\n if event.button == 'down' and self.index_segmen[-1\n ] > -1 * self.vol_segmen.shape[0]:\n self.index_segmen[-1] -= 1\n if event.button == 'up' and self.index_segmen[-1\n ] < self.vol_segmen.shape[0] - 1:\n self.index_segmen[-1] += 1\n self.im_segmen.set_data(self.vol_segmen[self.index_segmen[-1]])\n self.txt_segmen.set_text('Slice No: ' + str(self.index_segmen[-1]))\n self.fig.canvas.draw_idle()\n",
"step-5": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Thu Apr 4 12:47:30 2019\r\nTitle: MP4-Medical Image Processing\r\n@author: MP4 Team\r\n\r\n\"\"\"\r\n\r\n# Validate window controller\r\nclass ValidateWindowCtr(object):\r\n # Initialization\r\n def __init__(self, fig, im_trans, im_truth, im_segmen, vol_trans, vol_truth, vol_segmen, ax_trans, ax_truth, ax_segmen, index_trans, index_truth, index_segmen):\r\n self.fig = fig\r\n self.im_trans, self.im_truth, self.im_segmen = im_trans, im_truth, im_segmen\r\n self.vol_trans, self.vol_truth, self.vol_segmen = vol_trans, vol_truth, vol_segmen\r\n self.ax_trans, self.ax_truth, self.ax_segmen = ax_trans, ax_truth, ax_segmen\r\n self.index_trans, self.index_truth, self.index_segmen = index_trans, index_truth, index_segmen\r\n \r\n self.txt_trans = self.ax_trans.text(0, 600, 'Slice No: '+str(self.index_trans[-1]), color='b')\r\n self.txt_truth = self.ax_truth.text(0, 10, 'Slice No: '+str(self.index_truth[-1]), color='b')\r\n self.txt_segmen = self.ax_segmen.text(0, 600, 'Slice No: '+str(self.index_segmen[-1]), color='b')\r\n \r\n self.scroll_trans = None\r\n self.scroll_truth = None\r\n self.scroll_segmen = None\r\n self.fig.canvas.mpl_connect('axes_enter_event', self.fig_enter_event)\r\n self.fig.canvas.mpl_connect('axes_leave_event', self.fig_leave_event)\r\n \r\n # Enable scrolling image\r\n def fig_enter_event(self, event):\r\n if self.ax_trans.in_axes(event):\r\n self.scroll_trans = self.fig.canvas.mpl_connect('scroll_event', self.trans_subplot_scroll)\r\n \r\n elif self.ax_truth.in_axes(event):\r\n self.scroll_truth = self.fig.canvas.mpl_connect('scroll_event', self.truth_subplot_scroll)\r\n \r\n elif self.ax_segmen.in_axes(event):\r\n self.scroll_segmen = self.fig.canvas.mpl_connect('scroll_event', self.segmen_subplot_scroll)\r\n \r\n # Disable scrolling image\r\n def fig_leave_event(self, event):\r\n self.fig.canvas.mpl_disconnect(self.scroll_trans)\r\n self.fig.canvas.mpl_disconnect(self.scroll_truth)\r\n self.fig.canvas.mpl_disconnect(self.scroll_segmen)\r\n \r\n # Scroll voxel image\r\n def trans_subplot_scroll(self, event): \r\n if event.button == 'down' and (self.index_trans[-1] > -1*self.vol_trans.shape[0]):\r\n self.index_trans[-1] -= 1\r\n \r\n if event.button == 'up' and (self.index_trans[-1] < self.vol_trans.shape[0]-1):\r\n self.index_trans[-1] += 1\r\n \r\n self.im_trans.set_data(self.vol_trans[self.index_trans[-1]])\r\n self.txt_trans.set_text('Slice No: '+str(self.index_trans[-1]))\r\n self.fig.canvas.draw_idle()\r\n \r\n # Scroll ground truth image\r\n def truth_subplot_scroll(self, event): \r\n if event.button == 'down' and (self.index_truth[-1] > -1*self.vol_truth.shape[0]):\r\n self.index_truth[-1] -= 1\r\n \r\n if event.button == 'up' and (self.index_truth[-1] < self.vol_truth.shape[0]-1):\r\n self.index_truth[-1] += 1\r\n \r\n self.im_truth.set_data(self.vol_truth[self.index_truth[-1]])\r\n self.txt_truth.set_text('Slice No: '+str(self.index_truth[-1]))\r\n self.fig.canvas.draw_idle()\r\n \r\n # Scroll segmented image\r\n def segmen_subplot_scroll(self, event): \r\n if event.button == 'down' and (self.index_segmen[-1] > -1*self.vol_segmen.shape[0]):\r\n self.index_segmen[-1] -= 1\r\n \r\n if event.button == 'up' and (self.index_segmen[-1] < self.vol_segmen.shape[0]-1):\r\n self.index_segmen[-1] += 1\r\n \r\n self.im_segmen.set_data(self.vol_segmen[self.index_segmen[-1]])\r\n self.txt_segmen.set_text('Slice No: '+str(self.index_segmen[-1])) \r\n self.fig.canvas.draw_idle()\r\n ",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
<|reserved_special_token_0|>
class Config(object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Config(object):
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL'
) or 'postgres' or 'sqlite:///' + os.path.join(basedir, 'app.db')
SQLALCHEMY_TRACK_MODIFICATIONS = False
MONGODB_DB = 'project1'
MONGODB_HOST = 'mongodb'
MONGODB_PORT = 27017
SECRET_KEY = os.environ.get('SECRET_KEY') or 'you-will-never-guess'
<|reserved_special_token_1|>
<|reserved_special_token_0|>
basedir = os.path.abspath(os.path.dirname(__file__))
<|reserved_special_token_0|>
class Config(object):
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL'
) or 'postgres' or 'sqlite:///' + os.path.join(basedir, 'app.db')
SQLALCHEMY_TRACK_MODIFICATIONS = False
MONGODB_DB = 'project1'
MONGODB_HOST = 'mongodb'
MONGODB_PORT = 27017
SECRET_KEY = os.environ.get('SECRET_KEY') or 'you-will-never-guess'
<|reserved_special_token_1|>
import os
basedir = os.path.abspath(os.path.dirname(__file__))
from datetime import datetime
class Config(object):
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL'
) or 'postgres' or 'sqlite:///' + os.path.join(basedir, 'app.db')
SQLALCHEMY_TRACK_MODIFICATIONS = False
MONGODB_DB = 'project1'
MONGODB_HOST = 'mongodb'
MONGODB_PORT = 27017
SECRET_KEY = os.environ.get('SECRET_KEY') or 'you-will-never-guess'
<|reserved_special_token_1|>
import os
basedir = os.path.abspath(os.path.dirname(__file__))
from datetime import datetime
class Config(object):
# ...
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \
'postgres' or 'sqlite:///' + os.path.join(basedir, 'app.db')
SQLALCHEMY_TRACK_MODIFICATIONS = False
MONGODB_DB = 'project1'
MONGODB_HOST = 'mongodb'
MONGODB_PORT = 27017
SECRET_KEY = os.environ.get('SECRET_KEY') or 'you-will-never-guess'
|
flexible
|
{
"blob_id": "118380f58cd173d2de5572a1591766e38ca4a7f8",
"index": 8846,
"step-1": "<mask token>\n\n\nclass Config(object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Config(object):\n SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL'\n ) or 'postgres' or 'sqlite:///' + os.path.join(basedir, 'app.db')\n SQLALCHEMY_TRACK_MODIFICATIONS = False\n MONGODB_DB = 'project1'\n MONGODB_HOST = 'mongodb'\n MONGODB_PORT = 27017\n SECRET_KEY = os.environ.get('SECRET_KEY') or 'you-will-never-guess'\n",
"step-3": "<mask token>\nbasedir = os.path.abspath(os.path.dirname(__file__))\n<mask token>\n\n\nclass Config(object):\n SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL'\n ) or 'postgres' or 'sqlite:///' + os.path.join(basedir, 'app.db')\n SQLALCHEMY_TRACK_MODIFICATIONS = False\n MONGODB_DB = 'project1'\n MONGODB_HOST = 'mongodb'\n MONGODB_PORT = 27017\n SECRET_KEY = os.environ.get('SECRET_KEY') or 'you-will-never-guess'\n",
"step-4": "import os\nbasedir = os.path.abspath(os.path.dirname(__file__))\nfrom datetime import datetime\n\n\nclass Config(object):\n SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL'\n ) or 'postgres' or 'sqlite:///' + os.path.join(basedir, 'app.db')\n SQLALCHEMY_TRACK_MODIFICATIONS = False\n MONGODB_DB = 'project1'\n MONGODB_HOST = 'mongodb'\n MONGODB_PORT = 27017\n SECRET_KEY = os.environ.get('SECRET_KEY') or 'you-will-never-guess'\n",
"step-5": "import os\nbasedir = os.path.abspath(os.path.dirname(__file__))\nfrom datetime import datetime\n\n\nclass Config(object):\n # ...\n SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \\\n 'postgres' or 'sqlite:///' + os.path.join(basedir, 'app.db')\n SQLALCHEMY_TRACK_MODIFICATIONS = False\n\n MONGODB_DB = 'project1'\n MONGODB_HOST = 'mongodb'\n MONGODB_PORT = 27017\n \n SECRET_KEY = os.environ.get('SECRET_KEY') or 'you-will-never-guess'",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def song_inference():
sp_total_model_path = 'sp_total'
train = pd.read_json('./dataset/train.json', typ='frame', encoding='utf-8')
song = pd.read_json('./dataset/song_meta.json', typ='frame', encoding=
'utf-8')
plylst_tag = train['tags']
tag_counter = Counter([tg for tgs in plylst_tag for tg in tgs])
tag_dict = {x: tag_counter[x] for x in tag_counter}
tag_id_tid = dict()
tag_tid_id = dict()
for i, t in enumerate(tag_dict):
tag_id_tid[t] = i
tag_tid_id[i] = t
n_tags = len(tag_dict)
plylst_song = train['songs']
song_dict = {x: x for x in song['id']}
n_songs = len(song_dict)
train['tags_id'] = train['tags'].map(lambda x: [tag_id_tid.get(t) for t in
x if tag_id_tid.get(t) != None])
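    # collect the detailed genre codes (song_gn_dtl_gnr_basket) of every song in each playlist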
song_cate = []
for i in range(len(train)):
gnr = []
songs = train.iloc[i, 3]
for j in songs:
for k in song.loc[j, 'song_gn_dtl_gnr_basket']:
gnr.append(k)
song_cate.append(gnr)
train['plylst_genre'] = song_cate
plylst_genre = train['plylst_genre']
genre_counter = Counter([gen for genre in plylst_genre for gen in genre])
genre_dict = {x: genre_counter[x] for x in genre_counter}
genre_id_tid = dict()
genre_tid_id = dict()
for i, t in enumerate(genre_dict):
genre_id_tid[t] = i
genre_tid_id[i] = t
n_genre = len(genre_dict)
train['plylst_genre_id'] = train['plylst_genre'].map(lambda x: [
genre_id_tid.get(s) for s in x if genre_id_tid.get(s) != None])
gnr_array = np.zeros((len(train), n_genre))
for i, index in enumerate(train.index):
if i % 10000 == 0:
print(i)
counter = Counter(train.loc[index]['plylst_genre_id'])
for k, c in counter.items():
gnr_array[i][k] = c
gnr_array.shape
song['issue_date'] = song['issue_date'].astype('str').map(lambda x: x[:6])
plylst_use = train[['plylst_title', 'updt_date', 'tags_id', 'songs']]
plylst_use.loc[:, 'num_songs'] = plylst_use['songs'].map(len)
plylst_use.loc[:, 'num_tags'] = plylst_use['tags_id'].map(len)
plylst_train = plylst_use
n_train = len(plylst_train)
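    # build playlist-by-song and playlist-by-tag incidence matrices in CSR format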
row = np.repeat(range(n_train), plylst_train['num_songs'])
col = [song for songs in plylst_train['songs'] for song in songs]
dat = np.repeat(1, plylst_train['num_songs'].sum())
train_user_songs_A = spr.csr_matrix((dat, (row, col)), shape=(n_train,
n_songs))
row = np.repeat(range(n_train), plylst_train['num_tags'])
col = [tag for tags in plylst_train['tags_id'] for tag in tags]
dat = np.repeat(1, plylst_train['num_tags'].sum())
train_user_tags_A = spr.csr_matrix((dat, (row, col)), shape=(n_train,
n_tags))
train_user_songs_A_T = train_user_songs_A.T.tocsr()
train_user_songs_A_T
train_user_tags_A_T = train_user_tags_A.T.tocsr()
train_user_tags_A_T
val = pd.read_json('./dataset/val.json', typ='frame', encoding='utf-8')
song_cate = []
for i in range(len(val)):
gnr = []
songs = val.iloc[i, 3]
for j in songs:
for k in song.loc[j, 'song_gn_dtl_gnr_basket']:
gnr.append(k)
song_cate.append(gnr)
val['plylst_genre'] = song_cate
val['tags_id'] = val['tags'].map(lambda x: [tag_id_tid.get(t) for t in
x if tag_id_tid.get(t) != None])
val['plylst_genre_id'] = val['plylst_genre'].map(lambda x: [
genre_id_tid.get(s) for s in x if genre_id_tid.get(s) != None])
val.loc[:, 'num_songs'] = val['songs'].map(len)
val.loc[:, 'num_tags'] = val['tags_id'].map(len)
gnr_val = np.zeros((len(val), n_genre))
for i, index in enumerate(val.index):
if i % 10000 == 0:
print(i)
counter = Counter(val.loc[index]['plylst_genre_id'])
for k, c in counter.items():
gnr_val[i][k] = c
gnr_val.shape
n_val = len(val)
row = np.repeat(range(n_val), val['num_songs'])
col = [song for songs in val['songs'] for song in songs]
dat = np.repeat(1, val['num_songs'].sum())
val_user_songs_A = spr.csr_matrix((dat, (row, col)), shape=(n_val, n_songs)
)
row = np.repeat(range(n_val), val['num_tags'])
col = [tag for tags in val['tags_id'] for tag in tags]
dat = np.repeat(1, val['num_tags'].sum())
val_user_tags_A = spr.csr_matrix((dat, (row, col)), shape=(n_val, n_tags))
val_user_songs_A_T = val_user_songs_A.T.tocsr()
val_user_tags_A_T = val_user_tags_A.T.tocsr()
test = pd.read_json('./dataset/test.json', typ='frame', encoding='utf-8')
song_cate = []
for i in range(len(test)):
gnr = []
songs = test.iloc[i, 3]
for j in songs:
for k in song.loc[j, 'song_gn_dtl_gnr_basket']:
gnr.append(k)
song_cate.append(gnr)
test['plylst_genre'] = song_cate
test['tags_id'] = test['tags'].map(lambda x: [tag_id_tid.get(t) for t in
x if tag_id_tid.get(t) != None])
test['plylst_genre_id'] = test['plylst_genre'].map(lambda x: [
genre_id_tid.get(s) for s in x if genre_id_tid.get(s) != None])
test.loc[:, 'num_songs'] = test['songs'].map(len)
test.loc[:, 'num_tags'] = test['tags_id'].map(len)
gnr_test = np.zeros((len(test), n_genre))
for i, index in enumerate(test.index):
if i % 10000 == 0:
print(i)
counter = Counter(test.loc[index]['plylst_genre_id'])
for k, c in counter.items():
gnr_test[i][k] = c
gnr_test.shape
n_test = len(test)
row = np.repeat(range(n_test), test['num_songs'])
col = [song for songs in test['songs'] for song in songs]
dat = np.repeat(1, test['num_songs'].sum())
test_user_songs_A = spr.csr_matrix((dat, (row, col)), shape=(n_test,
n_songs))
row = np.repeat(range(n_test), test['num_tags'])
col = [tag for tags in test['tags_id'] for tag in tags]
dat = np.repeat(1, test['num_tags'].sum())
test_user_tags_A = spr.csr_matrix((dat, (row, col)), shape=(n_test, n_tags)
)
test_user_songs_A_T = test_user_songs_A.T.tocsr()
test_user_tags_A_T = test_user_tags_A.T.tocsr()
data_all = pd.concat([train, val, test])
data_all.index = range(len(data_all))
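    # artist features: only artists with at least 12 songs are kept, to bound the dimensionality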
arts = song['artist_id_basket'].map(lambda x: x[0])
arts = pd.DataFrame(arts)
art_counts = arts['artist_id_basket'].value_counts().reset_index()
art_counts.columns = ['artist_id_basket', 'counts']
arts2 = pd.merge(arts, art_counts, how='left', on=['artist_id_basket'])
song_art = song.iloc[arts2.query('counts >= 12')['artist_id_basket'].index]
song_art = song_art[['artist_id_basket']]
ART_cate = []
for i in tqdm_notebook(range(len(data_all))):
ART = []
songs = data_all.loc[i, 'songs']
for j in songs:
if j in song_art.index:
for k in song_art.loc[j, 'artist_id_basket']:
ART.append(k)
ART_cate.append(ART)
data_all['plylst_ARTIST'] = ART_cate
plylst_ARTIST = data_all['plylst_ARTIST']
ARTIST_counter = Counter([ART for ARTIST in plylst_ARTIST for ART in
ARTIST])
ARTIST_dict = {x: ARTIST_counter[x] for x in ARTIST_counter}
ARTIST_id_tid = dict()
ARTIST_tid_id = dict()
for i, t in enumerate(ARTIST_dict):
ARTIST_id_tid[t] = i
ARTIST_tid_id[i] = t
n_ARTIST = len(ARTIST_dict)
data_all['plylst_ARTIST_id'] = data_all['plylst_ARTIST'].map(lambda x:
[ARTIST_id_tid.get(s) for s in x if ARTIST_id_tid.get(s) != None])
ART_data_all = np.zeros((len(data_all), n_ARTIST))
for i, index in enumerate(data_all.index):
if i % 10000 == 0:
print(i)
counter = Counter(data_all.loc[index]['plylst_ARTIST_id'])
for k, c in counter.items():
ART_data_all[i][k] = c
ART_data_all.shape
ART_array = ART_data_all[:len(train)]
ART_val = ART_data_all[len(train):len(train) + len(val)]
ART_test = ART_data_all[len(train) + len(val):len(train) + len(val) +
len(test)]
del ART_data_all
ART_array = sparse.csr_matrix(ART_array)
ART_val = sparse.csr_matrix(ART_val)
ART_test = sparse.csr_matrix(ART_test)
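    # issue-date features: counts of song release months (YYYYMM) per playlist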
tim_cate = []
for i in tqdm_notebook(range(len(data_all))):
tim = []
songs = data_all.loc[i, 'songs']
for j in songs:
tim.append(song.loc[j, 'issue_date'])
tim_cate.append(tim)
data_all['plylst_times'] = tim_cate
plylst_times = data_all['plylst_times']
times_counter = Counter([tim for times in plylst_times for tim in times])
times_dict = {x: times_counter[x] for x in times_counter}
times_id_tid = dict()
times_tid_id = dict()
for i, t in enumerate(times_dict):
times_id_tid[t] = i
times_tid_id[i] = t
n_times = len(times_dict)
data_all['plylst_times_id'] = data_all['plylst_times'].map(lambda x: [
times_id_tid.get(s) for s in x if times_id_tid.get(s) != None])
tim_data_all = np.zeros((len(data_all), n_times))
for i, index in enumerate(data_all.index):
if i % 10000 == 0:
print(i)
counter = Counter(data_all.loc[index]['plylst_times_id'])
for k, c in counter.items():
tim_data_all[i][k] = c
tim_array = tim_data_all[:len(train)]
tim_val = tim_data_all[len(train):len(train) + len(val)]
tim_test = tim_data_all[len(train) + len(val):len(train) + len(val) +
len(test)]
del tim_data_all
tim_array = sparse.csr_matrix(tim_array)
tim_val = sparse.csr_matrix(tim_val)
tim_test = sparse.csr_matrix(tim_test)
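    # coarse genre features (song_gn_gnr_basket) per playlist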
GEN_cate = []
for i in tqdm_notebook(range(len(data_all))):
GEN = []
songs = data_all.loc[i, 'songs']
for j in songs:
for k in song.loc[j, 'song_gn_gnr_basket']:
GEN.append(k)
GEN_cate.append(GEN)
data_all['plylst_GENRE'] = GEN_cate
plylst_GENRE = data_all['plylst_GENRE']
GENRE_counter = Counter([GEN for GENRE in plylst_GENRE for GEN in GENRE])
GENRE_dict = {x: GENRE_counter[x] for x in GENRE_counter}
GENRE_id_tid = dict()
GENRE_tid_id = dict()
for i, t in enumerate(GENRE_dict):
GENRE_id_tid[t] = i
GENRE_tid_id[i] = t
n_GENRE = len(GENRE_dict)
data_all['plylst_GENRE_id'] = data_all['plylst_GENRE'].map(lambda x: [
GENRE_id_tid.get(s) for s in x if GENRE_id_tid.get(s) != None])
GEN_data_all = np.zeros((len(data_all), n_GENRE))
for i, index in enumerate(data_all.index):
if i % 10000 == 0:
print(i)
counter = Counter(data_all.loc[index]['plylst_GENRE_id'])
for k, c in counter.items():
GEN_data_all[i][k] = c
GEN_array = GEN_data_all[:len(train)]
GEN_val = GEN_data_all[len(train):len(train) + len(val)]
GEN_test = GEN_data_all[len(train) + len(val):len(train) + len(val) +
len(test)]
del GEN_data_all
GEN_array = sparse.csr_matrix(GEN_array)
GEN_val = sparse.csr_matrix(GEN_val)
GEN_test = sparse.csr_matrix(GEN_test)
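    # title features: SentencePiece-tokenized bag-of-words over playlist titles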
content = data_all['plylst_title']
if '{}.model'.format(sp_total_model_path) not in os.listdir():
makeSentencepieceModel(data_all, sp_total_model_path)
sp = SentencePieceProcessor()
sp.Load('{}.model'.format(sp_total_model_path))
cv = CountVectorizer(max_features=3000, tokenizer=sp.encode_as_pieces)
content = data_all['plylst_title']
tdm = cv.fit_transform(content)
title_tdm = tdm.toarray()
title_tr = title_tdm[:len(train)]
title_va = title_tdm[len(train):len(train) + len(val)]
title_ts = title_tdm[len(train) + len(val):len(train) + len(val) + len(
test)]
title_gnr = np.concatenate((gnr_array, title_tr), axis=1)
val_title_gnr = np.concatenate((gnr_val, title_va), axis=1)
test_title_gnr = np.concatenate((gnr_test, title_ts), axis=1)
title_sp = sparse.csr_matrix(title_tdm)
title_gnr = sparse.csr_matrix(title_gnr)
val_title_gnr = sparse.csr_matrix(val_title_gnr)
test_title_gnr = sparse.csr_matrix(test_title_gnr)
title_gnr = vstack([title_gnr, val_title_gnr, test_title_gnr])
song_sp = vstack([train_user_songs_A, val_user_songs_A, test_user_songs_A])
tag_sp = vstack([train_user_tags_A, val_user_tags_A, test_user_tags_A])
times_sp = vstack([tim_array, tim_val, tim_test])
GEN_sp = vstack([GEN_array, GEN_val, GEN_test])
ART_sp = vstack([ART_array, ART_val, ART_test])
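    # fit two banks of cosine kNN models (k=25 and k=40) over every feature view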
model_knn_song25 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=25, n_jobs=-1)
model_knn_tag25 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=25, n_jobs=-1)
model_knn_title25 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=25, n_jobs=-1)
model_knn_title_gnr25 = NearestNeighbors(metric='cosine', algorithm=
'brute', n_neighbors=25, n_jobs=-1)
model_knn_times25 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=25, n_jobs=-1)
model_knn_GEN25 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=25, n_jobs=-1)
model_knn_ART25 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=25, n_jobs=-1)
model_knn_song40 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=40, n_jobs=-1)
model_knn_tag40 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=40, n_jobs=-1)
model_knn_title40 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=40, n_jobs=-1)
model_knn_title_gnr40 = NearestNeighbors(metric='cosine', algorithm=
'brute', n_neighbors=40, n_jobs=-1)
model_knn_times40 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=40, n_jobs=-1)
model_knn_GEN40 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=40, n_jobs=-1)
model_knn_ART40 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=40, n_jobs=-1)
model_knn_song25.fit(song_sp)
model_knn_tag25.fit(tag_sp)
model_knn_title25.fit(title_sp)
model_knn_title_gnr25.fit(title_gnr)
model_knn_times25.fit(times_sp)
model_knn_GEN25.fit(GEN_sp)
model_knn_ART25.fit(ART_sp)
model_knn_song40.fit(song_sp)
model_knn_tag40.fit(tag_sp)
model_knn_title40.fit(title_sp)
model_knn_title_gnr40.fit(title_gnr)
model_knn_times40.fit(times_sp)
model_knn_GEN40.fit(GEN_sp)
model_knn_ART40.fit(ART_sp)
train.loc[:, 'num_songs'] = train['songs'].map(len)
train.loc[:, 'num_tags'] = train['tags_id'].map(len)
data_all = pd.concat([train, val, test])
data_all.index = range(len(data_all))
res = []
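    # for each test playlist with at least 2 songs and 2 tags, score candidate songs by the
    # product of cosine similarities across all feature views, once with 25 and once with 40 neighbours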
for i in tqdm_notebook(range(len(test))):
data = test.iloc[i]
pid = i
if len(data['songs']) >= 2 and len(data['tags_id']) >= 2:
        p = np.zeros((n_songs, 1))  # song indicator vector for this playlist
p[data['songs']] = 1
pp = np.zeros((n_tags, 1))
pp[data['tags_id']] = 1
tra_song = data_all.iloc[model_knn_song25.kneighbors(p.T)[1][0]]
row = np.repeat(range(25), tra_song['num_songs'])
col = [song for songs in tra_song['songs'] for song in songs]
dat = np.repeat(1, tra_song['num_songs'].sum())
tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_songs)
)
tra_song_sp_T = tra_song_sp.T.tocsr()
tra_tag = data_all.iloc[model_knn_tag25.kneighbors(pp.T)[1][0]]
row = np.repeat(range(25), tra_tag['num_tags'])
col = [tag for tags in tra_tag['tags_id'] for tag in tags]
dat = np.repeat(1, tra_tag['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
tra_tim = times_sp[model_knn_times25.kneighbors(tim_test[i:i +
1])[1][0]]
tra_GEN = GEN_sp[model_knn_GEN25.kneighbors(GEN_test[i:i + 1])[
1][0]]
tra_ART = ART_sp[model_knn_ART25.kneighbors(ART_test[i:i + 1])[
1][0]]
tra_title_gnr = title_gnr[model_knn_title_gnr25.kneighbors(
test_title_gnr[i:i + 1])[1][0]]
songs_already = data['songs']
tags_already = data['tags_id']
test_song = cosine_similarity(tra_song_sp, p.T)
test_tag = cosine_similarity(tra_tag_sp, pp.T)
test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])
test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])
test_ART = cosine_similarity(tra_ART, ART_test[i:i + 1])
test_title_genre = cosine_similarity(tra_title_gnr,
test_title_gnr[i:i + 1])
testi = (test_song * test_tag * test_title_genre * test_tim *
test_GEN * test_ART)
cand_song = tra_song_sp_T.dot(testi)
cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]
cand_song_idx = cand_song_idx[np.isin(cand_song_idx,
songs_already) == False]
cand1 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()
tra_song = data_all.iloc[model_knn_song40.kneighbors(p.T)[1][0]]
row = np.repeat(range(40), tra_song['num_songs'])
col = [song for songs in tra_song['songs'] for song in songs]
dat = np.repeat(1, tra_song['num_songs'].sum())
tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_songs)
)
tra_song_sp_T = tra_song_sp.T.tocsr()
tra_tag = data_all.iloc[model_knn_tag40.kneighbors(pp.T)[1][0]]
row = np.repeat(range(40), tra_tag['num_tags'])
col = [tag for tags in tra_tag['tags_id'] for tag in tags]
dat = np.repeat(1, tra_tag['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
tra_tim = times_sp[model_knn_times40.kneighbors(tim_test[i:i +
1])[1][0]]
tra_GEN = GEN_sp[model_knn_GEN40.kneighbors(GEN_test[i:i + 1])[
1][0]]
tra_ART = ART_sp[model_knn_ART40.kneighbors(ART_test[i:i + 1])[
1][0]]
tra_title_gnr = title_gnr[model_knn_title_gnr40.kneighbors(
test_title_gnr[i:i + 1])[1][0]]
test_song = cosine_similarity(tra_song_sp, p.T)
test_tag = cosine_similarity(tra_tag_sp, pp.T)
test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])
test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])
test_ART = cosine_similarity(tra_ART, ART_test[i:i + 1])
test_title_genre = cosine_similarity(tra_title_gnr,
test_title_gnr[i:i + 1])
testi = (test_song * test_tag * test_title_genre * test_tim *
test_GEN * test_ART)
cand_song = tra_song_sp_T.dot(testi)
cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]
cand_song_idx = cand_song_idx[np.isin(cand_song_idx,
songs_already) == False]
cand2 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()
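        # Outer-join the k=25 and k=40 candidate tables on song index,
        # average the two scores ('0_x'/'0_y' are the merge-suffixed score
        # columns) and keep the top 100 unseen songs.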
cand_all = pd.merge(cand1, cand2, how='outer', on='index')
cand_all = cand_all.fillna(0)
cand_all['pred'] = (cand_all['0_x'] + cand_all['0_y']) / 2
cand_song_idx = list(cand_all.sort_values(by=['pred'],
ascending=False)[:100]['index'])
cand_tag = tra_tag_sp_T.dot(testi)
cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]
cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==
False][:10]
rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]
res.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,
'tags': rec_tag_idx})
elif len(data['songs']) != 0:
        p = np.zeros((n_songs, 1))
p[data['songs']] = 1
tra_song = data_all.iloc[model_knn_song25.kneighbors(p.T)[1][0]]
row = np.repeat(range(25), tra_song['num_songs'])
col = [song for songs in tra_song['songs'] for song in songs]
dat = np.repeat(1, tra_song['num_songs'].sum())
tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_songs)
)
tra_song_sp_T = tra_song_sp.T.tocsr()
row = np.repeat(range(25), tra_song['num_tags'])
col = [tag for tags in tra_song['tags_id'] for tag in tags]
dat = np.repeat(1, tra_song['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
tra_tim = times_sp[model_knn_times25.kneighbors(tim_test[i:i +
1])[1][0]]
tra_GEN = GEN_sp[model_knn_GEN25.kneighbors(GEN_test[i:i + 1])[
1][0]]
tra_ART = ART_sp[model_knn_ART25.kneighbors(ART_test[i:i + 1])[
1][0]]
tra_title_gnr = title_gnr[model_knn_title_gnr25.kneighbors(
test_title_gnr[i:i + 1])[1][0]]
songs_already = data['songs']
tags_already = data['tags_id']
test_song = cosine_similarity(tra_song_sp, p.T)
test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])
test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])
test_ART = cosine_similarity(tra_ART, ART_test[i:i + 1])
test_title_genre = cosine_similarity(tra_title_gnr,
test_title_gnr[i:i + 1])
testi = (test_song * test_title_genre * test_tim * test_GEN *
test_ART)
cand_song = tra_song_sp_T.dot(testi)
cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]
cand_song_idx = cand_song_idx[np.isin(cand_song_idx,
songs_already) == False]
cand1 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()
tra_song = data_all.iloc[model_knn_song40.kneighbors(p.T)[1][0]]
row = np.repeat(range(40), tra_song['num_songs'])
col = [song for songs in tra_song['songs'] for song in songs]
dat = np.repeat(1, tra_song['num_songs'].sum())
tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_songs)
)
tra_song_sp_T = tra_song_sp.T.tocsr()
row = np.repeat(range(40), tra_song['num_tags'])
col = [tag for tags in tra_song['tags_id'] for tag in tags]
dat = np.repeat(1, tra_song['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
tra_tim = times_sp[model_knn_times40.kneighbors(tim_test[i:i +
1])[1][0]]
tra_GEN = GEN_sp[model_knn_GEN40.kneighbors(GEN_test[i:i + 1])[
1][0]]
tra_ART = ART_sp[model_knn_ART40.kneighbors(ART_test[i:i + 1])[
1][0]]
tra_title_gnr = title_gnr[model_knn_title_gnr40.kneighbors(
test_title_gnr[i:i + 1])[1][0]]
test_song = cosine_similarity(tra_song_sp, p.T)
test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])
test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])
test_ART = cosine_similarity(tra_ART, ART_test[i:i + 1])
test_title_genre = cosine_similarity(tra_title_gnr,
test_title_gnr[i:i + 1])
testi = (test_song * test_title_genre * test_tim * test_GEN *
test_ART)
cand_song = tra_song_sp_T.dot(testi)
cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]
cand_song_idx = cand_song_idx[np.isin(cand_song_idx,
songs_already) == False]
cand2 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()
cand_all = pd.merge(cand1, cand2, how='outer', on='index')
cand_all = cand_all.fillna(0)
cand_all['pred'] = (cand_all['0_x'] + cand_all['0_y']) / 2
cand_song_idx = list(cand_all.sort_values(by=['pred'],
ascending=False)[:100]['index'])
cand_tag = tra_tag_sp_T.dot(testi)
cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]
cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==
False][:10]
rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]
res.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,
'tags': rec_tag_idx})
elif len(data['tags_id']) != 0:
p = np.zeros((n_tags, 1))
p[data['tags_id']] = 1
tra_tag = data_all.iloc[model_knn_tag25.kneighbors(p.T)[1][0]]
row = np.repeat(range(25), tra_tag['num_songs'])
col = [song for songs in tra_tag['songs'] for song in songs]
dat = np.repeat(1, tra_tag['num_songs'].sum())
tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_songs)
)
tra_song_sp_T = tra_song_sp.T.tocsr()
row = np.repeat(range(25), tra_tag['num_tags'])
col = [tag for tags in tra_tag['tags_id'] for tag in tags]
dat = np.repeat(1, tra_tag['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
songs_already = data['songs']
tags_already = data['tags_id']
        testi = cosine_similarity(tra_tag_sp, p.T)  # p is the tag indicator vector here
if len(data['plylst_title']) != 0:
tra_title_gnr = title_tdm[model_knn_title25.kneighbors(
title_ts[i:i + 1])[1][0]]
testi_title = cosine_similarity(tra_title_gnr, title_ts[i:i +
1])
testi = testi * testi_title
cand_song = tra_song_sp_T.dot(testi)
cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]
cand_song_idx = cand_song_idx[np.isin(cand_song_idx,
songs_already) == False][:100]
cand_tag = tra_tag_sp_T.dot(testi)
cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]
cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==
False][:10]
rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]
res.append({'id': test.loc[pid, 'id'], 'songs': list(
cand_song_idx), 'tags': rec_tag_idx})
else:
cand_song = []
for li in data_all.iloc[model_knn_title25.kneighbors(title_ts[i
:i + 1])[1][0]].songs.to_list():
for j in li:
cand_song.append(j)
cand_tag = []
for li in data_all.iloc[model_knn_title25.kneighbors(title_ts[i
:i + 1])[1][0]].tags.to_list():
for j in li:
cand_tag.append(j)
cand_song_idx = list(pd.DataFrame(cand_song)[0].value_counts()[
:100].index)
rec_tag_idx = list(pd.DataFrame(cand_tag)[0].value_counts()[:10
].index)
res.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,
'tags': rec_tag_idx})
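# Sanity check: every playlist should end up with exactly 100 songs and 10 tags.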
for i in range(len(res)):
if len(res[i]['songs']) != 100:
        print('songs: error at index {}'.format(i))
if len(res[i]['tags']) != 10:
        print('tags: error at index {}'.format(i))
rec = []
for i in range(len(res)):
rec.append({'id': res[i]['id'], 'songs': list(res[i]['songs']),
'tags': res[i]['tags']})
result1 = pd.DataFrame(rec)
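# Second pass with k=50 neighborhoods for a few hand-picked playlists whose
# first-pass song lists came up short; their rows in result1 are overwritten
# below.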
model_knn_song = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=50, n_jobs=-1)
model_knn_tag = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=50, n_jobs=-1)
model_knn_title = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=50, n_jobs=-1)
model_knn_title_gnr = NearestNeighbors(metric='cosine', algorithm=
'brute', n_neighbors=50, n_jobs=-1)
model_knn_times = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=50, n_jobs=-1)
model_knn_GEN = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=50, n_jobs=-1)
model_knn_ART = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=50, n_jobs=-1)
model_knn_song.fit(song_sp)
model_knn_tag.fit(tag_sp)
model_knn_title.fit(title_sp)
model_knn_title_gnr.fit(title_gnr)
model_knn_times.fit(times_sp)
model_knn_GEN.fit(GEN_sp)
model_knn_ART.fit(ART_sp)
res2 = []
for i in tqdm_notebook([1960, 6361, 8705, 9310, 10498]):
data = test.iloc[i]
pid = i
if len(data['songs']) != 0 and len(data['tags_id']) != 0:
        p = np.zeros((n_songs, 1))
p[data['songs']] = 1
pp = np.zeros((n_tags, 1))
pp[data['tags_id']] = 1
tra_song = data_all.iloc[model_knn_song.kneighbors(p.T)[1][0]]
row = np.repeat(range(50), tra_song['num_songs'])
col = [song for songs in tra_song['songs'] for song in songs]
dat = np.repeat(1, tra_song['num_songs'].sum())
tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_songs)
)
tra_song_sp_T = tra_song_sp.T.tocsr()
tra_tag = data_all.iloc[model_knn_tag.kneighbors(pp.T)[1][0]]
row = np.repeat(range(50), tra_tag['num_tags'])
col = [tag for tags in tra_tag['tags_id'] for tag in tags]
dat = np.repeat(1, tra_tag['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
tra_tim = times_sp[model_knn_times.kneighbors(tim_test[i:i + 1]
)[1][0]]
tra_GEN = GEN_sp[model_knn_GEN.kneighbors(GEN_test[i:i + 1])[1][0]]
tra_title_gnr = title_gnr[model_knn_title_gnr.kneighbors(
test_title_gnr[i:i + 1])[1][0]]
songs_already = data['songs']
tags_already = data['tags_id']
test_song = cosine_similarity(tra_song_sp, p.T)
test_tag = cosine_similarity(tra_tag_sp, pp.T)
test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])
test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])
test_title_genre = cosine_similarity(tra_title_gnr,
test_title_gnr[i:i + 1])
testi = test_song * test_tag * test_title_genre * test_GEN
cand_song = tra_song_sp_T.dot(testi)
cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]
cand_song_idx = cand_song_idx[np.isin(cand_song_idx,
songs_already) == False][:100]
cand_tag = tra_tag_sp_T.dot(testi)
cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]
cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==
False][:10]
rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]
res2.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,
'tags': rec_tag_idx})
elif len(data['songs']) != 0:
        p = np.zeros((n_songs, 1))
p[data['songs']] = 1
tra_song = data_all.iloc[model_knn_song.kneighbors(p.T)[1][0]]
row = np.repeat(range(50), tra_song['num_songs'])
col = [song for songs in tra_song['songs'] for song in songs]
dat = np.repeat(1, tra_song['num_songs'].sum())
tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_songs)
)
tra_song_sp_T = tra_song_sp.T.tocsr()
row = np.repeat(range(50), tra_song['num_tags'])
col = [tag for tags in tra_song['tags_id'] for tag in tags]
dat = np.repeat(1, tra_song['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
songs_already = data['songs']
tags_already = data['tags_id']
tra_tim = times_sp[model_knn_times.kneighbors(tim_test[i:i + 1]
)[1][0]]
tra_GEN = GEN_sp[model_knn_GEN.kneighbors(GEN_test[i:i + 1])[1][0]]
tra_title_gnr = title_gnr[model_knn_title_gnr.kneighbors(
test_title_gnr[i:i + 1])[1][0]]
test_song = cosine_similarity(tra_song_sp, p.T)
test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])
test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])
test_title_genre = cosine_similarity(tra_title_gnr,
test_title_gnr[i:i + 1])
testi = test_song * test_title_genre * test_tim * test_GEN
cand_song = tra_song_sp_T.dot(testi)
cand_song_idx = cand_song.reshape(-1).argsort()[-200:][::-1]
cand_song_idx = cand_song_idx[np.isin(cand_song_idx,
songs_already) == False][:100]
cand_tag = tra_tag_sp_T.dot(testi)
cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]
cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==
False][:10]
rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]
res2.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,
'tags': rec_tag_idx})
elif len(data['tags_id']) != 0:
p = np.zeros((n_tags, 1))
p[data['tags_id']] = 1
tra_tag = data_all.iloc[model_knn_tag.kneighbors(p.T)[1][0]]
row = np.repeat(range(50), tra_tag['num_songs'])
col = [song for songs in tra_tag['songs'] for song in songs]
dat = np.repeat(1, tra_tag['num_songs'].sum())
tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_songs)
)
tra_song_sp_T = tra_song_sp.T.tocsr()
row = np.repeat(range(50), tra_tag['num_tags'])
col = [tag for tags in tra_tag['tags_id'] for tag in tags]
dat = np.repeat(1, tra_tag['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
songs_already = data['songs']
tags_already = data['tags_id']
        testi = cosine_similarity(tra_tag_sp, p.T)  # p is the tag indicator vector here
if len(data['plylst_title']) != 0:
tra_title_gnr = title_tdm[model_knn_title.kneighbors(
title_ts[i:i + 1])[1][0]]
testi_title = cosine_similarity(tra_title_gnr, title_ts[i:i +
1])
testi = testi * testi_title
cand_song = tra_song_sp_T.dot(testi)
cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]
cand_song_idx = cand_song_idx[np.isin(cand_song_idx,
songs_already) == False][:100]
cand_tag = tra_tag_sp_T.dot(testi)
cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]
cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==
False][:10]
rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]
res2.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,
'tags': rec_tag_idx})
else:
cand_song = []
for li in data_all.iloc[model_knn_title.kneighbors(title_ts[i:i +
1])[1][0]].songs.to_list():
for j in li:
cand_song.append(j)
cand_tag = []
for li in data_all.iloc[model_knn_title.kneighbors(title_ts[i:i +
1])[1][0]].tags.to_list():
for j in li:
cand_tag.append(j)
cand_song_idx = list(pd.DataFrame(cand_song)[0].value_counts()[
:100].index)
rec_tag_idx = list(pd.DataFrame(cand_tag)[0].value_counts()[:10
].index)
res2.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,
'tags': rec_tag_idx})
pd.DataFrame(res2)
rec2 = []
for i in range(len(res2)):
rec2.append({'id': res2[i]['id'], 'songs': list(res2[i]['songs']),
'tags': res2[i]['tags']})
result2 = pd.DataFrame(rec2)['songs']
n_index = [10498, 6361, 1960, 8705, 9310]
result2.index = n_index
result1.loc[n_index, 'songs'] = result2
result1['songs'].apply(len).sort_values()
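# Final fallback: playlist 6361 is filled by hand with the 100 most frequent
# songs across the training playlists.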
s = []
for song in train.songs.tolist():
s += song
r1 = dict(Counter(s))
r_song = sorted(r1.items(), key=lambda x: -x[1])
r_song_top = r_song[:100]
list_song = list(dict(r_song_top).keys())
len(list_song)
sub = []
for j in range(len(result1)):
sub.append(result1.loc[j].to_dict())
sub[6361]['songs'] = list_song
pd.DataFrame(sub)['songs'].apply(len).sort_values()
write_json(sub, 'final_songs.json')
return sub
def song_inference():
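# Song-side inference: builds sparse playlist features (song, tag,
# detailed-genre, artist, issue-date and title-token counts), fits cosine
# k-NN indexes over them, scores song/tag candidates for every test
# playlist, and writes the result to final_songs.json.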
sp_total_model_path = 'sp_total'
train = pd.read_json('./dataset/train.json', typ='frame', encoding='utf-8')
song = pd.read_json('./dataset/song_meta.json', typ='frame', encoding=
'utf-8')
plylst_tag = train['tags']
tag_counter = Counter([tg for tgs in plylst_tag for tg in tgs])
tag_dict = {x: tag_counter[x] for x in tag_counter}
tag_id_tid = dict()
tag_tid_id = dict()
for i, t in enumerate(tag_dict):
tag_id_tid[t] = i
tag_tid_id[i] = t
n_tags = len(tag_dict)
plylst_song = train['songs']
song_dict = {x: x for x in song['id']}
n_songs = len(song_dict)
train['tags_id'] = train['tags'].map(lambda x: [tag_id_tid.get(t) for t in
x if tag_id_tid.get(t) != None])
song_cate = []
for i in range(len(train)):
gnr = []
songs = train.iloc[i, 3]
for j in songs:
for k in song.loc[j, 'song_gn_dtl_gnr_basket']:
gnr.append(k)
song_cate.append(gnr)
train['plylst_genre'] = song_cate
plylst_genre = train['plylst_genre']
genre_counter = Counter([gen for genre in plylst_genre for gen in genre])
genre_dict = {x: genre_counter[x] for x in genre_counter}
genre_id_tid = dict()
genre_tid_id = dict()
for i, t in enumerate(genre_dict):
genre_id_tid[t] = i
genre_tid_id[i] = t
n_genre = len(genre_dict)
train['plylst_genre_id'] = train['plylst_genre'].map(lambda x: [
genre_id_tid.get(s) for s in x if genre_id_tid.get(s) != None])
gnr_array = np.zeros((len(train), n_genre))
for i, index in enumerate(train.index):
if i % 10000 == 0:
print(i)
counter = Counter(train.loc[index]['plylst_genre_id'])
for k, c in counter.items():
gnr_array[i][k] = c
gnr_array.shape
song['issue_date'] = song['issue_date'].astype('str').map(lambda x: x[:6])
plylst_use = train[['plylst_title', 'updt_date', 'tags_id', 'songs']]
plylst_use.loc[:, 'num_songs'] = plylst_use['songs'].map(len)
plylst_use.loc[:, 'num_tags'] = plylst_use['tags_id'].map(len)
plylst_train = plylst_use
n_train = len(plylst_train)
row = np.repeat(range(n_train), plylst_train['num_songs'])
col = [song for songs in plylst_train['songs'] for song in songs]
dat = np.repeat(1, plylst_train['num_songs'].sum())
train_user_songs_A = spr.csr_matrix((dat, (row, col)), shape=(n_train,
n_songs))
row = np.repeat(range(n_train), plylst_train['num_tags'])
col = [tag for tags in plylst_train['tags_id'] for tag in tags]
dat = np.repeat(1, plylst_train['num_tags'].sum())
train_user_tags_A = spr.csr_matrix((dat, (row, col)), shape=(n_train,
n_tags))
train_user_songs_A_T = train_user_songs_A.T.tocsr()
train_user_tags_A_T = train_user_tags_A.T.tocsr()
val = pd.read_json('./dataset/val.json', typ='frame', encoding='utf-8')
song_cate = []
for i in range(len(val)):
gnr = []
songs = val.iloc[i, 3]
for j in songs:
for k in song.loc[j, 'song_gn_dtl_gnr_basket']:
gnr.append(k)
song_cate.append(gnr)
val['plylst_genre'] = song_cate
val['tags_id'] = val['tags'].map(lambda x: [tag_id_tid.get(t) for t in
x if tag_id_tid.get(t) != None])
val['plylst_genre_id'] = val['plylst_genre'].map(lambda x: [
genre_id_tid.get(s) for s in x if genre_id_tid.get(s) != None])
val.loc[:, 'num_songs'] = val['songs'].map(len)
val.loc[:, 'num_tags'] = val['tags_id'].map(len)
gnr_val = np.zeros((len(val), n_genre))
for i, index in enumerate(val.index):
if i % 10000 == 0:
print(i)
counter = Counter(val.loc[index]['plylst_genre_id'])
for k, c in counter.items():
gnr_val[i][k] = c
gnr_val.shape
n_val = len(val)
row = np.repeat(range(n_val), val['num_songs'])
col = [song for songs in val['songs'] for song in songs]
dat = np.repeat(1, val['num_songs'].sum())
val_user_songs_A = spr.csr_matrix((dat, (row, col)), shape=(n_val, n_songs)
)
row = np.repeat(range(n_val), val['num_tags'])
col = [tag for tags in val['tags_id'] for tag in tags]
dat = np.repeat(1, val['num_tags'].sum())
val_user_tags_A = spr.csr_matrix((dat, (row, col)), shape=(n_val, n_tags))
val_user_songs_A_T = val_user_songs_A.T.tocsr()
val_user_tags_A_T = val_user_tags_A.T.tocsr()
test = pd.read_json('./dataset/test.json', typ='frame', encoding='utf-8')
song_cate = []
for i in range(len(test)):
gnr = []
songs = test.iloc[i, 3]
for j in songs:
for k in song.loc[j, 'song_gn_dtl_gnr_basket']:
gnr.append(k)
song_cate.append(gnr)
test['plylst_genre'] = song_cate
test['tags_id'] = test['tags'].map(lambda x: [tag_id_tid.get(t) for t in
x if tag_id_tid.get(t) != None])
test['plylst_genre_id'] = test['plylst_genre'].map(lambda x: [
genre_id_tid.get(s) for s in x if genre_id_tid.get(s) != None])
test.loc[:, 'num_songs'] = test['songs'].map(len)
test.loc[:, 'num_tags'] = test['tags_id'].map(len)
gnr_test = np.zeros((len(test), n_genre))
for i, index in enumerate(test.index):
if i % 10000 == 0:
print(i)
counter = Counter(test.loc[index]['plylst_genre_id'])
for k, c in counter.items():
gnr_test[i][k] = c
gnr_test.shape
n_test = len(test)
row = np.repeat(range(n_test), test['num_songs'])
col = [song for songs in test['songs'] for song in songs]
dat = np.repeat(1, test['num_songs'].sum())
test_user_songs_A = spr.csr_matrix((dat, (row, col)), shape=(n_test,
n_songs))
row = np.repeat(range(n_test), test['num_tags'])
col = [tag for tags in test['tags_id'] for tag in tags]
dat = np.repeat(1, test['num_tags'].sum())
test_user_tags_A = spr.csr_matrix((dat, (row, col)), shape=(n_test, n_tags)
)
test_user_songs_A_T = test_user_songs_A.T.tocsr()
test_user_tags_A_T = test_user_tags_A.T.tocsr()
data_all = pd.concat([train, val, test])
data_all.index = range(len(data_all))
arts = song['artist_id_basket'].map(lambda x: x[0])
arts = pd.DataFrame(arts)
art_counts = arts['artist_id_basket'].value_counts().reset_index()
art_counts.columns = ['artist_id_basket', 'counts']
arts2 = pd.merge(arts, art_counts, how='left', on=['artist_id_basket'])
song_art = song.iloc[arts2.query('counts >= 12')['artist_id_basket'].index]
song_art = song_art[['artist_id_basket']]
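# Artist features: only artists credited on at least 12 songs are kept,
# to bound the dimensionality of the artist count matrix.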
ART_cate = []
for i in tqdm_notebook(range(len(data_all))):
ART = []
songs = data_all.loc[i, 'songs']
for j in songs:
if j in song_art.index:
for k in song_art.loc[j, 'artist_id_basket']:
ART.append(k)
ART_cate.append(ART)
data_all['plylst_ARTIST'] = ART_cate
plylst_ARTIST = data_all['plylst_ARTIST']
ARTIST_counter = Counter([ART for ARTIST in plylst_ARTIST for ART in
ARTIST])
ARTIST_dict = {x: ARTIST_counter[x] for x in ARTIST_counter}
ARTIST_id_tid = dict()
ARTIST_tid_id = dict()
for i, t in enumerate(ARTIST_dict):
ARTIST_id_tid[t] = i
ARTIST_tid_id[i] = t
n_ARTIST = len(ARTIST_dict)
data_all['plylst_ARTIST_id'] = data_all['plylst_ARTIST'].map(lambda x:
[ARTIST_id_tid.get(s) for s in x if ARTIST_id_tid.get(s) != None])
ART_data_all = np.zeros((len(data_all), n_ARTIST))
for i, index in enumerate(data_all.index):
if i % 10000 == 0:
print(i)
counter = Counter(data_all.loc[index]['plylst_ARTIST_id'])
for k, c in counter.items():
ART_data_all[i][k] = c
ART_data_all.shape
ART_array = ART_data_all[:len(train)]
ART_val = ART_data_all[len(train):len(train) + len(val)]
ART_test = ART_data_all[len(train) + len(val):len(train) + len(val) +
len(test)]
del ART_data_all
ART_array = sparse.csr_matrix(ART_array)
ART_val = sparse.csr_matrix(ART_val)
ART_test = sparse.csr_matrix(ART_test)
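# Issue-date features: per-playlist counts of song issue months (YYYYMM,
# truncated from issue_date above).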
tim_cate = []
for i in tqdm_notebook(range(len(data_all))):
tim = []
songs = data_all.loc[i, 'songs']
for j in songs:
tim.append(song.loc[j, 'issue_date'])
tim_cate.append(tim)
data_all['plylst_times'] = tim_cate
plylst_times = data_all['plylst_times']
times_counter = Counter([tim for times in plylst_times for tim in times])
times_dict = {x: times_counter[x] for x in times_counter}
times_id_tid = dict()
times_tid_id = dict()
for i, t in enumerate(times_dict):
times_id_tid[t] = i
times_tid_id[i] = t
n_times = len(times_dict)
data_all['plylst_times_id'] = data_all['plylst_times'].map(lambda x: [
times_id_tid.get(s) for s in x if times_id_tid.get(s) != None])
tim_data_all = np.zeros((len(data_all), n_times))
for i, index in enumerate(data_all.index):
if i % 10000 == 0:
print(i)
counter = Counter(data_all.loc[index]['plylst_times_id'])
for k, c in counter.items():
tim_data_all[i][k] = c
tim_array = tim_data_all[:len(train)]
tim_val = tim_data_all[len(train):len(train) + len(val)]
tim_test = tim_data_all[len(train) + len(val):len(train) + len(val) +
len(test)]
del tim_data_all
tim_array = sparse.csr_matrix(tim_array)
tim_val = sparse.csr_matrix(tim_val)
tim_test = sparse.csr_matrix(tim_test)
GEN_cate = []
for i in tqdm_notebook(range(len(data_all))):
GEN = []
songs = data_all.loc[i, 'songs']
for j in songs:
for k in song.loc[j, 'song_gn_gnr_basket']:
GEN.append(k)
GEN_cate.append(GEN)
data_all['plylst_GENRE'] = GEN_cate
plylst_GENRE = data_all['plylst_GENRE']
GENRE_counter = Counter([GEN for GENRE in plylst_GENRE for GEN in GENRE])
GENRE_dict = {x: GENRE_counter[x] for x in GENRE_counter}
GENRE_id_tid = dict()
GENRE_tid_id = dict()
for i, t in enumerate(GENRE_dict):
GENRE_id_tid[t] = i
GENRE_tid_id[i] = t
n_GENRE = len(GENRE_dict)
data_all['plylst_GENRE_id'] = data_all['plylst_GENRE'].map(lambda x: [
GENRE_id_tid.get(s) for s in x if GENRE_id_tid.get(s) != None])
GEN_data_all = np.zeros((len(data_all), n_GENRE))
for i, index in enumerate(data_all.index):
if i % 10000 == 0:
print(i)
counter = Counter(data_all.loc[index]['plylst_GENRE_id'])
for k, c in counter.items():
GEN_data_all[i][k] = c
GEN_array = GEN_data_all[:len(train)]
GEN_val = GEN_data_all[len(train):len(train) + len(val)]
GEN_test = GEN_data_all[len(train) + len(val):len(train) + len(val) +
len(test)]
del GEN_data_all
GEN_array = sparse.csr_matrix(GEN_array)
GEN_val = sparse.csr_matrix(GEN_val)
GEN_test = sparse.csr_matrix(GEN_test)
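# Title features: SentencePiece-tokenized playlist titles become a
# 3000-dimensional term-count matrix, which is also concatenated with the
# detailed-genre counts to form title_gnr.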
content = data_all['plylst_title']
if '{}.model'.format(sp_total_model_path) not in os.listdir():
makeSentencepieceModel(data_all, sp_total_model_path)
sp = SentencePieceProcessor()
sp.Load('{}.model'.format(sp_total_model_path))
cv = CountVectorizer(max_features=3000, tokenizer=sp.encode_as_pieces)
content = data_all['plylst_title']
tdm = cv.fit_transform(content)
title_tdm = tdm.toarray()
title_tr = title_tdm[:len(train)]
title_va = title_tdm[len(train):len(train) + len(val)]
title_ts = title_tdm[len(train) + len(val):len(train) + len(val) + len(
test)]
title_gnr = np.concatenate((gnr_array, title_tr), axis=1)
val_title_gnr = np.concatenate((gnr_val, title_va), axis=1)
test_title_gnr = np.concatenate((gnr_test, title_ts), axis=1)
title_sp = sparse.csr_matrix(title_tdm)
title_gnr = sparse.csr_matrix(title_gnr)
val_title_gnr = sparse.csr_matrix(val_title_gnr)
test_title_gnr = sparse.csr_matrix(test_title_gnr)
title_gnr = vstack([title_gnr, val_title_gnr, test_title_gnr])
song_sp = vstack([train_user_songs_A, val_user_songs_A, test_user_songs_A])
tag_sp = vstack([train_user_tags_A, val_user_tags_A, test_user_tags_A])
times_sp = vstack([tim_array, tim_val, tim_test])
GEN_sp = vstack([GEN_array, GEN_val, GEN_test])
ART_sp = vstack([ART_array, ART_val, ART_test])
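# One cosine k-NN index per feature view (songs, tags, title, title+genre,
# issue dates, genres, artists), fitted on the stacked train+val+test
# matrices; the k=25 and k=40 neighborhoods are queried separately and
# their candidate scores averaged during inference.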
model_knn_song25 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=25, n_jobs=-1)
model_knn_tag25 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=25, n_jobs=-1)
model_knn_title25 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=25, n_jobs=-1)
model_knn_title_gnr25 = NearestNeighbors(metric='cosine', algorithm=
'brute', n_neighbors=25, n_jobs=-1)
model_knn_times25 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=25, n_jobs=-1)
model_knn_GEN25 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=25, n_jobs=-1)
model_knn_ART25 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=25, n_jobs=-1)
model_knn_song40 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=40, n_jobs=-1)
model_knn_tag40 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=40, n_jobs=-1)
model_knn_title40 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=40, n_jobs=-1)
model_knn_title_gnr40 = NearestNeighbors(metric='cosine', algorithm=
'brute', n_neighbors=40, n_jobs=-1)
model_knn_times40 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=40, n_jobs=-1)
model_knn_GEN40 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=40, n_jobs=-1)
model_knn_ART40 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=40, n_jobs=-1)
model_knn_song25.fit(song_sp)
model_knn_tag25.fit(tag_sp)
model_knn_title25.fit(title_sp)
model_knn_title_gnr25.fit(title_gnr)
model_knn_times25.fit(times_sp)
model_knn_GEN25.fit(GEN_sp)
model_knn_ART25.fit(ART_sp)
model_knn_song40.fit(song_sp)
model_knn_tag40.fit(tag_sp)
model_knn_title40.fit(title_sp)
model_knn_title_gnr40.fit(title_gnr)
model_knn_times40.fit(times_sp)
model_knn_GEN40.fit(GEN_sp)
model_knn_ART40.fit(ART_sp)
train.loc[:, 'num_songs'] = train['songs'].map(len)
train.loc[:, 'num_tags'] = train['tags_id'].map(len)
data_all = pd.concat([train, val, test])
data_all.index = range(len(data_all))
res = []
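# Per-playlist inference. Cases: (1) at least 2 songs and 2 tags: multiply
# the cosine similarities from every feature view; (2) songs only; (3) tags
# only, optionally refined by title similarity; (4) title only: vote over
# the songs/tags of the nearest-title playlists.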
for i in tqdm_notebook(range(len(test))):
data = test.iloc[i]
pid = i
if len(data['songs']) >= 2 and len(data['tags_id']) >= 2:
        p = np.zeros((n_songs, 1))  # song indicator vector for this playlist
p[data['songs']] = 1
pp = np.zeros((n_tags, 1))
pp[data['tags_id']] = 1
tra_song = data_all.iloc[model_knn_song25.kneighbors(p.T)[1][0]]
row = np.repeat(range(25), tra_song['num_songs'])
col = [song for songs in tra_song['songs'] for song in songs]
dat = np.repeat(1, tra_song['num_songs'].sum())
tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_songs)
)
tra_song_sp_T = tra_song_sp.T.tocsr()
tra_tag = data_all.iloc[model_knn_tag25.kneighbors(pp.T)[1][0]]
row = np.repeat(range(25), tra_tag['num_tags'])
col = [tag for tags in tra_tag['tags_id'] for tag in tags]
dat = np.repeat(1, tra_tag['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
tra_tim = times_sp[model_knn_times25.kneighbors(tim_test[i:i +
1])[1][0]]
tra_GEN = GEN_sp[model_knn_GEN25.kneighbors(GEN_test[i:i + 1])[
1][0]]
tra_ART = ART_sp[model_knn_ART25.kneighbors(ART_test[i:i + 1])[
1][0]]
tra_title_gnr = title_gnr[model_knn_title_gnr25.kneighbors(
test_title_gnr[i:i + 1])[1][0]]
songs_already = data['songs']
tags_already = data['tags_id']
test_song = cosine_similarity(tra_song_sp, p.T)
test_tag = cosine_similarity(tra_tag_sp, pp.T)
test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])
test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])
test_ART = cosine_similarity(tra_ART, ART_test[i:i + 1])
test_title_genre = cosine_similarity(tra_title_gnr,
test_title_gnr[i:i + 1])
testi = (test_song * test_tag * test_title_genre * test_tim *
test_GEN * test_ART)
cand_song = tra_song_sp_T.dot(testi)
cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]
cand_song_idx = cand_song_idx[np.isin(cand_song_idx,
songs_already) == False]
cand1 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()
tra_song = data_all.iloc[model_knn_song40.kneighbors(p.T)[1][0]]
row = np.repeat(range(40), tra_song['num_songs'])
col = [song for songs in tra_song['songs'] for song in songs]
dat = np.repeat(1, tra_song['num_songs'].sum())
tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_songs)
)
tra_song_sp_T = tra_song_sp.T.tocsr()
tra_tag = data_all.iloc[model_knn_tag40.kneighbors(pp.T)[1][0]]
row = np.repeat(range(40), tra_tag['num_tags'])
col = [tag for tags in tra_tag['tags_id'] for tag in tags]
dat = np.repeat(1, tra_tag['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
tra_tim = times_sp[model_knn_times40.kneighbors(tim_test[i:i +
1])[1][0]]
tra_GEN = GEN_sp[model_knn_GEN40.kneighbors(GEN_test[i:i + 1])[
1][0]]
tra_ART = ART_sp[model_knn_ART40.kneighbors(ART_test[i:i + 1])[
1][0]]
tra_title_gnr = title_gnr[model_knn_title_gnr40.kneighbors(
test_title_gnr[i:i + 1])[1][0]]
test_song = cosine_similarity(tra_song_sp, p.T)
test_tag = cosine_similarity(tra_tag_sp, pp.T)
test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])
test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])
test_ART = cosine_similarity(tra_ART, ART_test[i:i + 1])
test_title_genre = cosine_similarity(tra_title_gnr,
test_title_gnr[i:i + 1])
testi = (test_song * test_tag * test_title_genre * test_tim *
test_GEN * test_ART)
cand_song = tra_song_sp_T.dot(testi)
cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]
cand_song_idx = cand_song_idx[np.isin(cand_song_idx,
songs_already) == False]
cand2 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()
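        # Outer-join the k=25 and k=40 candidate tables on song index,
        # average the two scores ('0_x'/'0_y' are the merge-suffixed score
        # columns) and keep the top 100 unseen songs.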
cand_all = pd.merge(cand1, cand2, how='outer', on='index')
cand_all = cand_all.fillna(0)
cand_all['pred'] = (cand_all['0_x'] + cand_all['0_y']) / 2
cand_song_idx = list(cand_all.sort_values(by=['pred'],
ascending=False)[:100]['index'])
cand_tag = tra_tag_sp_T.dot(testi)
cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]
cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==
False][:10]
rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]
res.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,
'tags': rec_tag_idx})
elif len(data['songs']) != 0:
        p = np.zeros((n_songs, 1))
p[data['songs']] = 1
tra_song = data_all.iloc[model_knn_song25.kneighbors(p.T)[1][0]]
row = np.repeat(range(25), tra_song['num_songs'])
col = [song for songs in tra_song['songs'] for song in songs]
dat = np.repeat(1, tra_song['num_songs'].sum())
tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_songs)
)
tra_song_sp_T = tra_song_sp.T.tocsr()
row = np.repeat(range(25), tra_song['num_tags'])
col = [tag for tags in tra_song['tags_id'] for tag in tags]
dat = np.repeat(1, tra_song['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
tra_tim = times_sp[model_knn_times25.kneighbors(tim_test[i:i +
1])[1][0]]
tra_GEN = GEN_sp[model_knn_GEN25.kneighbors(GEN_test[i:i + 1])[
1][0]]
tra_ART = ART_sp[model_knn_ART25.kneighbors(ART_test[i:i + 1])[
1][0]]
tra_title_gnr = title_gnr[model_knn_title_gnr25.kneighbors(
test_title_gnr[i:i + 1])[1][0]]
songs_already = data['songs']
tags_already = data['tags_id']
test_song = cosine_similarity(tra_song_sp, p.T)
test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])
test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])
test_ART = cosine_similarity(tra_ART, ART_test[i:i + 1])
test_title_genre = cosine_similarity(tra_title_gnr,
test_title_gnr[i:i + 1])
testi = (test_song * test_title_genre * test_tim * test_GEN *
test_ART)
cand_song = tra_song_sp_T.dot(testi)
cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]
cand_song_idx = cand_song_idx[np.isin(cand_song_idx,
songs_already) == False]
cand1 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()
tra_song = data_all.iloc[model_knn_song40.kneighbors(p.T)[1][0]]
row = np.repeat(range(40), tra_song['num_songs'])
col = [song for songs in tra_song['songs'] for song in songs]
dat = np.repeat(1, tra_song['num_songs'].sum())
tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_songs)
)
tra_song_sp_T = tra_song_sp.T.tocsr()
row = np.repeat(range(40), tra_song['num_tags'])
col = [tag for tags in tra_song['tags_id'] for tag in tags]
dat = np.repeat(1, tra_song['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
tra_tim = times_sp[model_knn_times40.kneighbors(tim_test[i:i +
1])[1][0]]
tra_GEN = GEN_sp[model_knn_GEN40.kneighbors(GEN_test[i:i + 1])[
1][0]]
tra_ART = ART_sp[model_knn_ART40.kneighbors(ART_test[i:i + 1])[
1][0]]
tra_title_gnr = title_gnr[model_knn_title_gnr40.kneighbors(
test_title_gnr[i:i + 1])[1][0]]
test_song = cosine_similarity(tra_song_sp, p.T)
test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])
test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])
test_ART = cosine_similarity(tra_ART, ART_test[i:i + 1])
test_title_genre = cosine_similarity(tra_title_gnr,
test_title_gnr[i:i + 1])
testi = (test_song * test_title_genre * test_tim * test_GEN *
test_ART)
cand_song = tra_song_sp_T.dot(testi)
cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]
cand_song_idx = cand_song_idx[np.isin(cand_song_idx,
songs_already) == False]
cand2 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()
cand_all = pd.merge(cand1, cand2, how='outer', on='index')
cand_all = cand_all.fillna(0)
cand_all['pred'] = (cand_all['0_x'] + cand_all['0_y']) / 2
cand_song_idx = list(cand_all.sort_values(by=['pred'],
ascending=False)[:100]['index'])
cand_tag = tra_tag_sp_T.dot(testi)
cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]
cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==
False][:10]
rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]
res.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,
'tags': rec_tag_idx})
elif len(data['tags_id']) != 0:
p = np.zeros((n_tags, 1))
p[data['tags_id']] = 1
tra_tag = data_all.iloc[model_knn_tag25.kneighbors(p.T)[1][0]]
row = np.repeat(range(25), tra_tag['num_songs'])
col = [song for songs in tra_tag['songs'] for song in songs]
dat = np.repeat(1, tra_tag['num_songs'].sum())
tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_songs)
)
tra_song_sp_T = tra_song_sp.T.tocsr()
row = np.repeat(range(25), tra_tag['num_tags'])
col = [tag for tags in tra_tag['tags_id'] for tag in tags]
dat = np.repeat(1, tra_tag['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
songs_already = data['songs']
tags_already = data['tags_id']
        testi = cosine_similarity(tra_tag_sp, p.T)  # p is the tag indicator vector here
if len(data['plylst_title']) != 0:
tra_title_gnr = title_tdm[model_knn_title25.kneighbors(
title_ts[i:i + 1])[1][0]]
testi_title = cosine_similarity(tra_title_gnr, title_ts[i:i +
1])
testi = testi * testi_title
cand_song = tra_song_sp_T.dot(testi)
cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]
cand_song_idx = cand_song_idx[np.isin(cand_song_idx,
songs_already) == False][:100]
cand_tag = tra_tag_sp_T.dot(testi)
cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]
cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==
False][:10]
rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]
res.append({'id': test.loc[pid, 'id'], 'songs': list(
cand_song_idx), 'tags': rec_tag_idx})
else:
cand_song = []
for li in data_all.iloc[model_knn_title25.kneighbors(title_ts[i
:i + 1])[1][0]].songs.to_list():
for j in li:
cand_song.append(j)
cand_tag = []
for li in data_all.iloc[model_knn_title25.kneighbors(title_ts[i
:i + 1])[1][0]].tags.to_list():
for j in li:
cand_tag.append(j)
cand_song_idx = list(pd.DataFrame(cand_song)[0].value_counts()[
:100].index)
rec_tag_idx = list(pd.DataFrame(cand_tag)[0].value_counts()[:10
].index)
res.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,
'tags': rec_tag_idx})
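# Sanity check: every playlist should end up with exactly 100 songs and 10 tags.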
for i in range(len(res)):
if len(res[i]['songs']) != 100:
        print('songs: error at index {}'.format(i))
if len(res[i]['tags']) != 10:
        print('tags: error at index {}'.format(i))
rec = []
for i in range(len(res)):
rec.append({'id': res[i]['id'], 'songs': list(res[i]['songs']),
'tags': res[i]['tags']})
result1 = pd.DataFrame(rec)
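# Second pass with k=50 neighborhoods for a few hand-picked playlists whose
# first-pass song lists came up short; their rows in result1 are overwritten
# below.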
model_knn_song = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=50, n_jobs=-1)
model_knn_tag = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=50, n_jobs=-1)
model_knn_title = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=50, n_jobs=-1)
model_knn_title_gnr = NearestNeighbors(metric='cosine', algorithm=
'brute', n_neighbors=50, n_jobs=-1)
model_knn_times = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=50, n_jobs=-1)
model_knn_GEN = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=50, n_jobs=-1)
model_knn_ART = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=50, n_jobs=-1)
model_knn_song.fit(song_sp)
model_knn_tag.fit(tag_sp)
model_knn_title.fit(title_sp)
model_knn_title_gnr.fit(title_gnr)
model_knn_times.fit(times_sp)
model_knn_GEN.fit(GEN_sp)
model_knn_ART.fit(ART_sp)
res2 = []
for i in tqdm_notebook([1960, 6361, 8705, 9310, 10498]):
data = test.iloc[i]
pid = i
if len(data['songs']) != 0 and len(data['tags_id']) != 0:
        p = np.zeros((n_songs, 1))
p[data['songs']] = 1
pp = np.zeros((n_tags, 1))
pp[data['tags_id']] = 1
tra_song = data_all.iloc[model_knn_song.kneighbors(p.T)[1][0]]
row = np.repeat(range(50), tra_song['num_songs'])
col = [song for songs in tra_song['songs'] for song in songs]
dat = np.repeat(1, tra_song['num_songs'].sum())
tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_songs)
)
tra_song_sp_T = tra_song_sp.T.tocsr()
tra_tag = data_all.iloc[model_knn_tag.kneighbors(pp.T)[1][0]]
row = np.repeat(range(50), tra_tag['num_tags'])
col = [tag for tags in tra_tag['tags_id'] for tag in tags]
dat = np.repeat(1, tra_tag['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
tra_tim = times_sp[model_knn_times.kneighbors(tim_test[i:i + 1]
)[1][0]]
tra_GEN = GEN_sp[model_knn_GEN.kneighbors(GEN_test[i:i + 1])[1][0]]
tra_title_gnr = title_gnr[model_knn_title_gnr.kneighbors(
test_title_gnr[i:i + 1])[1][0]]
songs_already = data['songs']
tags_already = data['tags_id']
test_song = cosine_similarity(tra_song_sp, p.T)
test_tag = cosine_similarity(tra_tag_sp, pp.T)
test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])
test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])
test_title_genre = cosine_similarity(tra_title_gnr,
test_title_gnr[i:i + 1])
testi = test_song * test_tag * test_title_genre * test_GEN
cand_song = tra_song_sp_T.dot(testi)
cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]
cand_song_idx = cand_song_idx[np.isin(cand_song_idx,
songs_already) == False][:100]
cand_tag = tra_tag_sp_T.dot(testi)
cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]
cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==
False][:10]
rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]
res2.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,
'tags': rec_tag_idx})
elif len(data['songs']) != 0:
        p = np.zeros((n_songs, 1))
p[data['songs']] = 1
tra_song = data_all.iloc[model_knn_song.kneighbors(p.T)[1][0]]
row = np.repeat(range(50), tra_song['num_songs'])
col = [song for songs in tra_song['songs'] for song in songs]
dat = np.repeat(1, tra_song['num_songs'].sum())
tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_songs)
)
tra_song_sp_T = tra_song_sp.T.tocsr()
row = np.repeat(range(50), tra_song['num_tags'])
col = [tag for tags in tra_song['tags_id'] for tag in tags]
dat = np.repeat(1, tra_song['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
songs_already = data['songs']
tags_already = data['tags_id']
tra_tim = times_sp[model_knn_times.kneighbors(tim_test[i:i + 1]
)[1][0]]
tra_GEN = GEN_sp[model_knn_GEN.kneighbors(GEN_test[i:i + 1])[1][0]]
tra_title_gnr = title_gnr[model_knn_title_gnr.kneighbors(
test_title_gnr[i:i + 1])[1][0]]
test_song = cosine_similarity(tra_song_sp, p.T)
test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])
test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])
test_title_genre = cosine_similarity(tra_title_gnr,
test_title_gnr[i:i + 1])
testi = test_song * test_title_genre * test_tim * test_GEN
cand_song = tra_song_sp_T.dot(testi)
cand_song_idx = cand_song.reshape(-1).argsort()[-200:][::-1]
cand_song_idx = cand_song_idx[np.isin(cand_song_idx,
songs_already) == False][:100]
cand_tag = tra_tag_sp_T.dot(testi)
cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]
cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==
False][:10]
rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]
res2.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,
'tags': rec_tag_idx})
elif len(data['tags_id']) != 0:
p = np.zeros((n_tags, 1))
p[data['tags_id']] = 1
tra_tag = data_all.iloc[model_knn_tag.kneighbors(p.T)[1][0]]
row = np.repeat(range(50), tra_tag['num_songs'])
col = [song for songs in tra_tag['songs'] for song in songs]
dat = np.repeat(1, tra_tag['num_songs'].sum())
tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_songs)
)
tra_song_sp_T = tra_song_sp.T.tocsr()
row = np.repeat(range(50), tra_tag['num_tags'])
col = [tag for tags in tra_tag['tags_id'] for tag in tags]
dat = np.repeat(1, tra_tag['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
songs_already = data['songs']
tags_already = data['tags_id']
        testi = cosine_similarity(tra_tag_sp, p.T)  # p is the tag indicator vector here
if len(data['plylst_title']) != 0:
tra_title_gnr = title_tdm[model_knn_title.kneighbors(
title_ts[i:i + 1])[1][0]]
testi_title = cosine_similarity(tra_title_gnr, title_ts[i:i +
1])
testi = testi * testi_title
cand_song = tra_song_sp_T.dot(testi)
cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]
cand_song_idx = cand_song_idx[np.isin(cand_song_idx,
songs_already) == False][:100]
cand_tag = tra_tag_sp_T.dot(testi)
cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]
cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==
False][:10]
rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]
res2.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,
'tags': rec_tag_idx})
else:
cand_song = []
for li in data_all.iloc[model_knn_title.kneighbors(title_ts[i:i +
1])[1][0]].songs.to_list():
for j in li:
cand_song.append(j)
cand_tag = []
for li in data_all.iloc[model_knn_title.kneighbors(title_ts[i:i +
1])[1][0]].tags.to_list():
for j in li:
cand_tag.append(j)
cand_song_idx = list(pd.DataFrame(cand_song)[0].value_counts()[
:100].index)
rec_tag_idx = list(pd.DataFrame(cand_tag)[0].value_counts()[:10
].index)
res2.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,
'tags': rec_tag_idx})
pd.DataFrame(res2)
rec2 = []
for i in range(len(res2)):
rec2.append({'id': res2[i]['id'], 'songs': list(res2[i]['songs']),
'tags': res2[i]['tags']})
result2 = pd.DataFrame(rec2)['songs']
n_index = [10498, 6361, 1960, 8705, 9310]
result2.index = n_index
result1.loc[n_index, 'songs'] = result2
result1['songs'].apply(len).sort_values()
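# Final fallback: playlist 6361 is filled by hand with the 100 most frequent
# songs across the training playlists.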
s = []
for song in train.songs.tolist():
s += song
r1 = dict(Counter(s))
r_song = sorted(r1.items(), key=lambda x: -x[1])
r_song_top = r_song[:100]
list_song = list(dict(r_song_top).keys())
len(list_song)
sub = []
for j in range(len(result1)):
sub.append(result1.loc[j].to_dict())
sub[6361]['songs'] = list_song
pd.DataFrame(sub)['songs'].apply(len).sort_values()
write_json(sub, 'final_songs.json')
return sub
if __name__ == '__main__':
_data = Dataset()
pre_tag.run(_data.test, _data.n_songs, _data.n_tags, _data.spr_list,
_data.tag_tid_id)
final_tags = word2vec_for_tag.run(_data.total, _data.test)
final_songs = song_inference()
result = []
for f_songs, f_tags in zip(final_songs, final_tags):
result.append({'id': f_songs['id'], 'songs': f_songs['songs'],
'tags': f_tags['tags']})
write_json(result, 'results.json')
from datetime import timedelta, datetime
import glob
import json
import os
import re
import pickle
import time
import pandas as pd
import numpy as np
from collections import Counter
from sentencepiece import SentencePieceTrainer
from sentencepiece import SentencePieceProcessor
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.metrics.pairwise import cosine_similarity
from scipy.sparse import vstack
from scipy import sparse
import scipy.sparse as spr
from util import write_json, makeSentencepieceModel
from tqdm import tqdm_notebook
from sklearn.neighbors import NearestNeighbors
from Dataset import Dataset
import pre_tag, word2vec_for_tag
def song_inference():
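# Song-side inference: builds sparse playlist features (song, tag,
# detailed-genre, artist, issue-date and title-token counts), fits cosine
# k-NN indexes over them, scores song/tag candidates for every test
# playlist, and writes the result to final_songs.json.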
sp_total_model_path = 'sp_total'
train = pd.read_json('./dataset/train.json', typ='frame', encoding='utf-8')
song = pd.read_json('./dataset/song_meta.json', typ='frame', encoding=
'utf-8')
plylst_tag = train['tags']
tag_counter = Counter([tg for tgs in plylst_tag for tg in tgs])
tag_dict = {x: tag_counter[x] for x in tag_counter}
tag_id_tid = dict()
tag_tid_id = dict()
for i, t in enumerate(tag_dict):
tag_id_tid[t] = i
tag_tid_id[i] = t
n_tags = len(tag_dict)
plylst_song = train['songs']
song_dict = {x: x for x in song['id']}
n_songs = len(song_dict)
train['tags_id'] = train['tags'].map(lambda x: [tag_id_tid.get(t) for t in
x if tag_id_tid.get(t) != None])
song_cate = []
for i in range(len(train)):
gnr = []
songs = train.iloc[i, 3]
for j in songs:
for k in song.loc[j, 'song_gn_dtl_gnr_basket']:
gnr.append(k)
song_cate.append(gnr)
train['plylst_genre'] = song_cate
plylst_genre = train['plylst_genre']
genre_counter = Counter([gen for genre in plylst_genre for gen in genre])
genre_dict = {x: genre_counter[x] for x in genre_counter}
genre_id_tid = dict()
genre_tid_id = dict()
for i, t in enumerate(genre_dict):
genre_id_tid[t] = i
genre_tid_id[i] = t
n_genre = len(genre_dict)
train['plylst_genre_id'] = train['plylst_genre'].map(lambda x: [
genre_id_tid.get(s) for s in x if genre_id_tid.get(s) != None])
gnr_array = np.zeros((len(train), n_genre))
for i, index in enumerate(train.index):
if i % 10000 == 0:
print(i)
counter = Counter(train.loc[index]['plylst_genre_id'])
for k, c in counter.items():
gnr_array[i][k] = c
gnr_array.shape
song['issue_date'] = song['issue_date'].astype('str').map(lambda x: x[:6])
plylst_use = train[['plylst_title', 'updt_date', 'tags_id', 'songs']]
plylst_use.loc[:, 'num_songs'] = plylst_use['songs'].map(len)
plylst_use.loc[:, 'num_tags'] = plylst_use['tags_id'].map(len)
plylst_train = plylst_use
n_train = len(plylst_train)
row = np.repeat(range(n_train), plylst_train['num_songs'])
col = [song for songs in plylst_train['songs'] for song in songs]
dat = np.repeat(1, plylst_train['num_songs'].sum())
train_user_songs_A = spr.csr_matrix((dat, (row, col)), shape=(n_train,
n_songs))
row = np.repeat(range(n_train), plylst_train['num_tags'])
col = [tag for tags in plylst_train['tags_id'] for tag in tags]
dat = np.repeat(1, plylst_train['num_tags'].sum())
train_user_tags_A = spr.csr_matrix((dat, (row, col)), shape=(n_train,
n_tags))
train_user_songs_A_T = train_user_songs_A.T.tocsr()
train_user_tags_A_T = train_user_tags_A.T.tocsr()
val = pd.read_json('./dataset/val.json', typ='frame', encoding='utf-8')
song_cate = []
for i in range(len(val)):
gnr = []
songs = val.iloc[i, 3]
for j in songs:
for k in song.loc[j, 'song_gn_dtl_gnr_basket']:
gnr.append(k)
song_cate.append(gnr)
val['plylst_genre'] = song_cate
val['tags_id'] = val['tags'].map(lambda x: [tag_id_tid.get(t) for t in
x if tag_id_tid.get(t) != None])
val['plylst_genre_id'] = val['plylst_genre'].map(lambda x: [
genre_id_tid.get(s) for s in x if genre_id_tid.get(s) != None])
val.loc[:, 'num_songs'] = val['songs'].map(len)
val.loc[:, 'num_tags'] = val['tags_id'].map(len)
gnr_val = np.zeros((len(val), n_genre))
for i, index in enumerate(val.index):
if i % 10000 == 0:
print(i)
counter = Counter(val.loc[index]['plylst_genre_id'])
for k, c in counter.items():
gnr_val[i][k] = c
gnr_val.shape
n_val = len(val)
row = np.repeat(range(n_val), val['num_songs'])
col = [song for songs in val['songs'] for song in songs]
dat = np.repeat(1, val['num_songs'].sum())
val_user_songs_A = spr.csr_matrix((dat, (row, col)), shape=(n_val, n_songs)
)
row = np.repeat(range(n_val), val['num_tags'])
col = [tag for tags in val['tags_id'] for tag in tags]
dat = np.repeat(1, val['num_tags'].sum())
val_user_tags_A = spr.csr_matrix((dat, (row, col)), shape=(n_val, n_tags))
val_user_songs_A_T = val_user_songs_A.T.tocsr()
val_user_tags_A_T = val_user_tags_A.T.tocsr()
test = pd.read_json('./dataset/test.json', typ='frame', encoding='utf-8')
song_cate = []
for i in range(len(test)):
gnr = []
songs = test.iloc[i, 3]
for j in songs:
for k in song.loc[j, 'song_gn_dtl_gnr_basket']:
gnr.append(k)
song_cate.append(gnr)
test['plylst_genre'] = song_cate
test['tags_id'] = test['tags'].map(lambda x: [tag_id_tid.get(t) for t in
x if tag_id_tid.get(t) != None])
test['plylst_genre_id'] = test['plylst_genre'].map(lambda x: [
genre_id_tid.get(s) for s in x if genre_id_tid.get(s) != None])
test.loc[:, 'num_songs'] = test['songs'].map(len)
test.loc[:, 'num_tags'] = test['tags_id'].map(len)
gnr_test = np.zeros((len(test), n_genre))
for i, index in enumerate(test.index):
if i % 10000 == 0:
print(i)
counter = Counter(test.loc[index]['plylst_genre_id'])
for k, c in counter.items():
gnr_test[i][k] = c
gnr_test.shape
n_test = len(test)
row = np.repeat(range(n_test), test['num_songs'])
col = [song for songs in test['songs'] for song in songs]
dat = np.repeat(1, test['num_songs'].sum())
test_user_songs_A = spr.csr_matrix((dat, (row, col)), shape=(n_test,
n_songs))
row = np.repeat(range(n_test), test['num_tags'])
col = [tag for tags in test['tags_id'] for tag in tags]
dat = np.repeat(1, test['num_tags'].sum())
test_user_tags_A = spr.csr_matrix((dat, (row, col)), shape=(n_test, n_tags)
)
test_user_songs_A_T = test_user_songs_A.T.tocsr()
test_user_tags_A_T = test_user_tags_A.T.tocsr()
data_all = pd.concat([train, val, test])
data_all.index = range(len(data_all))
arts = song['artist_id_basket'].map(lambda x: x[0])
arts = pd.DataFrame(arts)
art_counts = arts['artist_id_basket'].value_counts().reset_index()
art_counts.columns = ['artist_id_basket', 'counts']
arts2 = pd.merge(arts, art_counts, how='left', on=['artist_id_basket'])
song_art = song.iloc[arts2.query('counts >= 12')['artist_id_basket'].index]
song_art = song_art[['artist_id_basket']]
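# Artist features: only artists credited on at least 12 songs are kept,
# to bound the dimensionality of the artist count matrix.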
ART_cate = []
for i in tqdm_notebook(range(len(data_all))):
ART = []
songs = data_all.loc[i, 'songs']
for j in songs:
if j in song_art.index:
for k in song_art.loc[j, 'artist_id_basket']:
ART.append(k)
ART_cate.append(ART)
data_all['plylst_ARTIST'] = ART_cate
plylst_ARTIST = data_all['plylst_ARTIST']
ARTIST_counter = Counter([ART for ARTIST in plylst_ARTIST for ART in
ARTIST])
ARTIST_dict = {x: ARTIST_counter[x] for x in ARTIST_counter}
ARTIST_id_tid = dict()
ARTIST_tid_id = dict()
for i, t in enumerate(ARTIST_dict):
ARTIST_id_tid[t] = i
ARTIST_tid_id[i] = t
n_ARTIST = len(ARTIST_dict)
data_all['plylst_ARTIST_id'] = data_all['plylst_ARTIST'].map(lambda x:
[ARTIST_id_tid.get(s) for s in x if ARTIST_id_tid.get(s) != None])
ART_data_all = np.zeros((len(data_all), n_ARTIST))
for i, index in enumerate(data_all.index):
if i % 10000 == 0:
print(i)
counter = Counter(data_all.loc[index]['plylst_ARTIST_id'])
for k, c in counter.items():
ART_data_all[i][k] = c
ART_data_all.shape
ART_array = ART_data_all[:len(train)]
ART_val = ART_data_all[len(train):len(train) + len(val)]
ART_test = ART_data_all[len(train) + len(val):len(train) + len(val) +
len(test)]
del ART_data_all
ART_array = sparse.csr_matrix(ART_array)
ART_val = sparse.csr_matrix(ART_val)
ART_test = sparse.csr_matrix(ART_test)
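# Issue-date features: per-playlist counts of song issue months (YYYYMM,
# truncated from issue_date above).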
tim_cate = []
for i in tqdm_notebook(range(len(data_all))):
tim = []
songs = data_all.loc[i, 'songs']
for j in songs:
tim.append(song.loc[j, 'issue_date'])
tim_cate.append(tim)
data_all['plylst_times'] = tim_cate
plylst_times = data_all['plylst_times']
times_counter = Counter([tim for times in plylst_times for tim in times])
times_dict = {x: times_counter[x] for x in times_counter}
times_id_tid = dict()
times_tid_id = dict()
for i, t in enumerate(times_dict):
times_id_tid[t] = i
times_tid_id[i] = t
n_times = len(times_dict)
data_all['plylst_times_id'] = data_all['plylst_times'].map(lambda x: [
times_id_tid.get(s) for s in x if times_id_tid.get(s) != None])
tim_data_all = np.zeros((len(data_all), n_times))
for i, index in enumerate(data_all.index):
if i % 10000 == 0:
print(i)
counter = Counter(data_all.loc[index]['plylst_times_id'])
for k, c in counter.items():
tim_data_all[i][k] = c
tim_array = tim_data_all[:len(train)]
tim_val = tim_data_all[len(train):len(train) + len(val)]
tim_test = tim_data_all[len(train) + len(val):len(train) + len(val) +
len(test)]
del tim_data_all
tim_array = sparse.csr_matrix(tim_array)
tim_val = sparse.csr_matrix(tim_val)
tim_test = sparse.csr_matrix(tim_test)
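# tim_* count how many of a playlist's songs were issued in each year-month
# bucket, giving a rough era profile per playlist.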
GEN_cate = []
for i in tqdm_notebook(range(len(data_all))):
GEN = []
songs = data_all.loc[i, 'songs']
for j in songs:
for k in song.loc[j, 'song_gn_gnr_basket']:
GEN.append(k)
GEN_cate.append(GEN)
data_all['plylst_GENRE'] = GEN_cate
plylst_GENRE = data_all['plylst_GENRE']
GENRE_counter = Counter([GEN for GENRE in plylst_GENRE for GEN in GENRE])
GENRE_dict = {x: GENRE_counter[x] for x in GENRE_counter}
GENRE_id_tid = dict()
GENRE_tid_id = dict()
for i, t in enumerate(GENRE_dict):
GENRE_id_tid[t] = i
GENRE_tid_id[i] = t
n_GENRE = len(GENRE_dict)
data_all['plylst_GENRE_id'] = data_all['plylst_GENRE'].map(lambda x: [
GENRE_id_tid.get(s) for s in x if GENRE_id_tid.get(s) != None])
GEN_data_all = np.zeros((len(data_all), n_GENRE))
for i, index in enumerate(data_all.index):
if i % 10000 == 0:
print(i)
counter = Counter(data_all.loc[index]['plylst_GENRE_id'])
for k, c in counter.items():
GEN_data_all[i][k] = c
GEN_array = GEN_data_all[:len(train)]
GEN_val = GEN_data_all[len(train):len(train) + len(val)]
GEN_test = GEN_data_all[len(train) + len(val):len(train) + len(val) +
len(test)]
del GEN_data_all
GEN_array = sparse.csr_matrix(GEN_array)
GEN_val = sparse.csr_matrix(GEN_val)
GEN_test = sparse.csr_matrix(GEN_test)
content = data_all['plylst_title']
if '{}.model'.format(sp_total_model_path) not in os.listdir():
makeSentencepieceModel(data_all, sp_total_model_path)
sp = SentencePieceProcessor()
sp.Load('{}.model'.format(sp_total_model_path))
cv = CountVectorizer(max_features=3000, tokenizer=sp.encode_as_pieces)
content = data_all['plylst_title']
tdm = cv.fit_transform(content)
title_tdm = tdm.toarray()
title_tr = title_tdm[:len(train)]
title_va = title_tdm[len(train):len(train) + len(val)]
title_ts = title_tdm[len(train) + len(val):len(train) + len(val) + len(
test)]
title_gnr = np.concatenate((gnr_array, title_tr), axis=1)
val_title_gnr = np.concatenate((gnr_val, title_va), axis=1)
test_title_gnr = np.concatenate((gnr_test, title_ts), axis=1)
title_sp = sparse.csr_matrix(title_tdm)
title_gnr = sparse.csr_matrix(title_gnr)
val_title_gnr = sparse.csr_matrix(val_title_gnr)
test_title_gnr = sparse.csr_matrix(test_title_gnr)
title_gnr = vstack([title_gnr, val_title_gnr, test_title_gnr])
song_sp = vstack([train_user_songs_A, val_user_songs_A, test_user_songs_A])
tag_sp = vstack([train_user_tags_A, val_user_tags_A, test_user_tags_A])
times_sp = vstack([tim_array, tim_val, tim_test])
GEN_sp = vstack([GEN_array, GEN_val, GEN_test])
ART_sp = vstack([ART_array, ART_val, ART_test])
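# Each *_sp matrix stacks train, val and test rows in that order, so offsets
# len(train) and len(train) + len(val) locate the val and test blocks.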
model_knn_song25 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=25, n_jobs=-1)
model_knn_tag25 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=25, n_jobs=-1)
model_knn_title25 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=25, n_jobs=-1)
model_knn_title_gnr25 = NearestNeighbors(metric='cosine', algorithm=
'brute', n_neighbors=25, n_jobs=-1)
model_knn_times25 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=25, n_jobs=-1)
model_knn_GEN25 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=25, n_jobs=-1)
model_knn_ART25 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=25, n_jobs=-1)
model_knn_song40 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=40, n_jobs=-1)
model_knn_tag40 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=40, n_jobs=-1)
model_knn_title40 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=40, n_jobs=-1)
model_knn_title_gnr40 = NearestNeighbors(metric='cosine', algorithm=
'brute', n_neighbors=40, n_jobs=-1)
model_knn_times40 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=40, n_jobs=-1)
model_knn_GEN40 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=40, n_jobs=-1)
model_knn_ART40 = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=40, n_jobs=-1)
model_knn_song25.fit(song_sp)
model_knn_tag25.fit(tag_sp)
model_knn_title25.fit(title_sp)
model_knn_title_gnr25.fit(title_gnr)
model_knn_times25.fit(times_sp)
model_knn_GEN25.fit(GEN_sp)
model_knn_ART25.fit(ART_sp)
model_knn_song40.fit(song_sp)
model_knn_tag40.fit(tag_sp)
model_knn_title40.fit(title_sp)
model_knn_title_gnr40.fit(title_gnr)
model_knn_times40.fit(times_sp)
model_knn_GEN40.fit(GEN_sp)
model_knn_ART40.fit(ART_sp)
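# NearestNeighbors.kneighbors returns a (distances, indices) pair; the loops
# below use model.kneighbors(q)[1][0] to get neighbor row indices for a single
# query. A minimal sketch (illustrative only):
#   dist, idx = model_knn_song25.kneighbors(test_user_songs_A[0])
#   idx[0]  # row indices of the 25 playlists most similar to test playlist 0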
train.loc[:, 'num_songs'] = train['songs'].map(len)
train.loc[:, 'num_tags'] = train['tags_id'].map(len)
data_all = pd.concat([train, val, test])
data_all.index = range(len(data_all))
res = []
for i in tqdm_notebook(range(len(test))):
data = test.iloc[i]
pid = i
if len(data['songs']) >= 2 and len(data['tags_id']) >= 2:
p = np.zeros((707989, 1))
p[data['songs']] = 1
pp = np.zeros((n_tags, 1))
pp[data['tags_id']] = 1
tra_song = data_all.iloc[model_knn_song25.kneighbors(p.T)[1][0]]
row = np.repeat(range(25), tra_song['num_songs'])
col = [song for songs in tra_song['songs'] for song in songs]
dat = np.repeat(1, tra_song['num_songs'].sum())
tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_songs)
)
tra_song_sp_T = tra_song_sp.T.tocsr()
tra_tag = data_all.iloc[model_knn_tag25.kneighbors(pp.T)[1][0]]
row = np.repeat(range(25), tra_tag['num_tags'])
col = [tag for tags in tra_tag['tags_id'] for tag in tags]
dat = np.repeat(1, tra_tag['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
tra_tim = times_sp[model_knn_times25.kneighbors(tim_test[i:i +
1])[1][0]]
tra_GEN = GEN_sp[model_knn_GEN25.kneighbors(GEN_test[i:i + 1])[
1][0]]
tra_ART = ART_sp[model_knn_ART25.kneighbors(ART_test[i:i + 1])[
1][0]]
tra_title_gnr = title_gnr[model_knn_title_gnr25.kneighbors(
test_title_gnr[i:i + 1])[1][0]]
songs_already = data['songs']
tags_already = data['tags_id']
test_song = cosine_similarity(tra_song_sp, p.T)
test_tag = cosine_similarity(tra_tag_sp, pp.T)
test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])
test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])
test_ART = cosine_similarity(tra_ART, ART_test[i:i + 1])
test_title_genre = cosine_similarity(tra_title_gnr,
test_title_gnr[i:i + 1])
testi = (test_song * test_tag * test_title_genre * test_tim *
test_GEN * test_ART)
cand_song = tra_song_sp_T.dot(testi)
cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]
cand_song_idx = cand_song_idx[np.isin(cand_song_idx,
songs_already) == False]
cand1 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()
tra_song = data_all.iloc[model_knn_song40.kneighbors(p.T)[1][0]]
row = np.repeat(range(40), tra_song['num_songs'])
col = [song for songs in tra_song['songs'] for song in songs]
dat = np.repeat(1, tra_song['num_songs'].sum())
tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_songs)
)
tra_song_sp_T = tra_song_sp.T.tocsr()
tra_tag = data_all.iloc[model_knn_tag40.kneighbors(pp.T)[1][0]]
row = np.repeat(range(40), tra_tag['num_tags'])
col = [tag for tags in tra_tag['tags_id'] for tag in tags]
dat = np.repeat(1, tra_tag['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
tra_tim = times_sp[model_knn_times40.kneighbors(tim_test[i:i +
1])[1][0]]
tra_GEN = GEN_sp[model_knn_GEN40.kneighbors(GEN_test[i:i + 1])[
1][0]]
tra_ART = ART_sp[model_knn_ART40.kneighbors(ART_test[i:i + 1])[
1][0]]
tra_title_gnr = title_gnr[model_knn_title_gnr40.kneighbors(
test_title_gnr[i:i + 1])[1][0]]
test_song = cosine_similarity(tra_song_sp, p.T)
test_tag = cosine_similarity(tra_tag_sp, pp.T)
test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])
test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])
test_ART = cosine_similarity(tra_ART, ART_test[i:i + 1])
test_title_genre = cosine_similarity(tra_title_gnr,
test_title_gnr[i:i + 1])
testi = (test_song * test_tag * test_title_genre * test_tim *
test_GEN * test_ART)
cand_song = tra_song_sp_T.dot(testi)
cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]
cand_song_idx = cand_song_idx[np.isin(cand_song_idx,
songs_already) == False]
cand2 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()
cand_all = pd.merge(cand1, cand2, how='outer', on='index')
cand_all = cand_all.fillna(0)
cand_all['pred'] = (cand_all['0_x'] + cand_all['0_y']) / 2
cand_song_idx = list(cand_all.sort_values(by=['pred'],
ascending=False)[:100]['index'])
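        # The k=25 and k=40 candidate lists are outer-merged on song id and
        # their scores averaged, a simple two-k ensemble, before keeping the
        # top 100.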
cand_tag = tra_tag_sp_T.dot(testi)
cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]
cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==
False][:10]
rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]
res.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,
'tags': rec_tag_idx})
elif len(data['songs']) != 0:
p = np.zeros((707989, 1))
p[data['songs']] = 1
tra_song = data_all.iloc[model_knn_song25.kneighbors(p.T)[1][0]]
row = np.repeat(range(25), tra_song['num_songs'])
col = [song for songs in tra_song['songs'] for song in songs]
dat = np.repeat(1, tra_song['num_songs'].sum())
tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_songs)
)
tra_song_sp_T = tra_song_sp.T.tocsr()
row = np.repeat(range(25), tra_song['num_tags'])
col = [tag for tags in tra_song['tags_id'] for tag in tags]
dat = np.repeat(1, tra_song['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
tra_tim = times_sp[model_knn_times25.kneighbors(tim_test[i:i +
1])[1][0]]
tra_GEN = GEN_sp[model_knn_GEN25.kneighbors(GEN_test[i:i + 1])[
1][0]]
tra_ART = ART_sp[model_knn_ART25.kneighbors(ART_test[i:i + 1])[
1][0]]
tra_title_gnr = title_gnr[model_knn_title_gnr25.kneighbors(
test_title_gnr[i:i + 1])[1][0]]
songs_already = data['songs']
tags_already = data['tags_id']
test_song = cosine_similarity(tra_song_sp, p.T)
test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])
test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])
test_ART = cosine_similarity(tra_ART, ART_test[i:i + 1])
test_title_genre = cosine_similarity(tra_title_gnr,
test_title_gnr[i:i + 1])
testi = (test_song * test_title_genre * test_tim * test_GEN *
test_ART)
cand_song = tra_song_sp_T.dot(testi)
cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]
cand_song_idx = cand_song_idx[np.isin(cand_song_idx,
songs_already) == False]
cand1 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()
tra_song = data_all.iloc[model_knn_song40.kneighbors(p.T)[1][0]]
row = np.repeat(range(40), tra_song['num_songs'])
col = [song for songs in tra_song['songs'] for song in songs]
dat = np.repeat(1, tra_song['num_songs'].sum())
tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_songs)
)
tra_song_sp_T = tra_song_sp.T.tocsr()
row = np.repeat(range(40), tra_song['num_tags'])
col = [tag for tags in tra_song['tags_id'] for tag in tags]
dat = np.repeat(1, tra_song['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
tra_tim = times_sp[model_knn_times40.kneighbors(tim_test[i:i +
1])[1][0]]
tra_GEN = GEN_sp[model_knn_GEN40.kneighbors(GEN_test[i:i + 1])[
1][0]]
tra_ART = ART_sp[model_knn_ART40.kneighbors(ART_test[i:i + 1])[
1][0]]
tra_title_gnr = title_gnr[model_knn_title_gnr40.kneighbors(
test_title_gnr[i:i + 1])[1][0]]
test_song = cosine_similarity(tra_song_sp, p.T)
test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])
test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])
test_ART = cosine_similarity(tra_ART, ART_test[i:i + 1])
test_title_genre = cosine_similarity(tra_title_gnr,
test_title_gnr[i:i + 1])
testi = (test_song * test_title_genre * test_tim * test_GEN *
test_ART)
cand_song = tra_song_sp_T.dot(testi)
cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]
cand_song_idx = cand_song_idx[np.isin(cand_song_idx,
songs_already) == False]
cand2 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()
cand_all = pd.merge(cand1, cand2, how='outer', on='index')
cand_all = cand_all.fillna(0)
cand_all['pred'] = (cand_all['0_x'] + cand_all['0_y']) / 2
cand_song_idx = list(cand_all.sort_values(by=['pred'],
ascending=False)[:100]['index'])
cand_tag = tra_tag_sp_T.dot(testi)
cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]
cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==
False][:10]
rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]
res.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,
'tags': rec_tag_idx})
elif len(data['tags_id']) != 0:
p = np.zeros((n_tags, 1))
p[data['tags_id']] = 1
tra_tag = data_all.iloc[model_knn_tag25.kneighbors(p.T)[1][0]]
row = np.repeat(range(25), tra_tag['num_songs'])
col = [song for songs in tra_tag['songs'] for song in songs]
dat = np.repeat(1, tra_tag['num_songs'].sum())
tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_songs)
)
tra_song_sp_T = tra_song_sp.T.tocsr()
row = np.repeat(range(25), tra_tag['num_tags'])
col = [tag for tags in tra_tag['tags_id'] for tag in tags]
dat = np.repeat(1, tra_tag['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
songs_already = data['songs']
tags_already = data['tags_id']
        testi = cosine_similarity(tra_tag_sp, p.T)  # fix: p is the tag one-hot here ('pp' is not defined in this branch)
if len(data['plylst_title']) != 0:
tra_title_gnr = title_tdm[model_knn_title25.kneighbors(
title_ts[i:i + 1])[1][0]]
testi_title = cosine_similarity(tra_title_gnr, title_ts[i:i +
1])
testi = testi * testi_title
cand_song = tra_song_sp_T.dot(testi)
cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]
cand_song_idx = cand_song_idx[np.isin(cand_song_idx,
songs_already) == False][:100]
cand_tag = tra_tag_sp_T.dot(testi)
cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]
cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==
False][:10]
rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]
res.append({'id': test.loc[pid, 'id'], 'songs': list(
cand_song_idx), 'tags': rec_tag_idx})
else:
cand_song = []
for li in data_all.iloc[model_knn_title25.kneighbors(title_ts[i
:i + 1])[1][0]].songs.to_list():
for j in li:
cand_song.append(j)
cand_tag = []
for li in data_all.iloc[model_knn_title25.kneighbors(title_ts[i
:i + 1])[1][0]].tags.to_list():
for j in li:
cand_tag.append(j)
cand_song_idx = list(pd.DataFrame(cand_song)[0].value_counts()[
:100].index)
rec_tag_idx = list(pd.DataFrame(cand_tag)[0].value_counts()[:10
].index)
res.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,
'tags': rec_tag_idx})
for i in range(len(res)):
if len(res[i]['songs']) != 100:
        print('songs: error at entry {} (candidate count != 100)'.format(i))
if len(res[i]['tags']) != 10:
        print('tags: error at entry {} (candidate count != 10)'.format(i))
rec = []
for i in range(len(res)):
rec.append({'id': res[i]['id'], 'songs': list(res[i]['songs']),
'tags': res[i]['tags']})
result1 = pd.DataFrame(rec)
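# A second pass with k=50 follows for five fixed playlist indices (apparently
# those whose candidate lists came up short above); note it drops the artist
# term, and the song+tag branch also leaves test_tim out of the product.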
model_knn_song = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=50, n_jobs=-1)
model_knn_tag = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=50, n_jobs=-1)
model_knn_title = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=50, n_jobs=-1)
model_knn_title_gnr = NearestNeighbors(metric='cosine', algorithm=
'brute', n_neighbors=50, n_jobs=-1)
model_knn_times = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=50, n_jobs=-1)
model_knn_GEN = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=50, n_jobs=-1)
model_knn_ART = NearestNeighbors(metric='cosine', algorithm='brute',
n_neighbors=50, n_jobs=-1)
model_knn_song.fit(song_sp)
model_knn_tag.fit(tag_sp)
model_knn_title.fit(title_sp)
model_knn_title_gnr.fit(title_gnr)
model_knn_times.fit(times_sp)
model_knn_GEN.fit(GEN_sp)
model_knn_ART.fit(ART_sp)
res2 = []
for i in tqdm_notebook([1960, 6361, 8705, 9310, 10498]):
data = test.iloc[i]
pid = i
if len(data['songs']) != 0 and len(data['tags_id']) != 0:
p = np.zeros((707989, 1))
p[data['songs']] = 1
pp = np.zeros((n_tags, 1))
pp[data['tags_id']] = 1
tra_song = data_all.iloc[model_knn_song.kneighbors(p.T)[1][0]]
row = np.repeat(range(50), tra_song['num_songs'])
col = [song for songs in tra_song['songs'] for song in songs]
dat = np.repeat(1, tra_song['num_songs'].sum())
tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_songs)
)
tra_song_sp_T = tra_song_sp.T.tocsr()
tra_tag = data_all.iloc[model_knn_tag.kneighbors(pp.T)[1][0]]
row = np.repeat(range(50), tra_tag['num_tags'])
col = [tag for tags in tra_tag['tags_id'] for tag in tags]
dat = np.repeat(1, tra_tag['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
tra_tim = times_sp[model_knn_times.kneighbors(tim_test[i:i + 1]
)[1][0]]
tra_GEN = GEN_sp[model_knn_GEN.kneighbors(GEN_test[i:i + 1])[1][0]]
tra_title_gnr = title_gnr[model_knn_title_gnr.kneighbors(
test_title_gnr[i:i + 1])[1][0]]
songs_already = data['songs']
tags_already = data['tags_id']
test_song = cosine_similarity(tra_song_sp, p.T)
test_tag = cosine_similarity(tra_tag_sp, pp.T)
test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])
test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])
test_title_genre = cosine_similarity(tra_title_gnr,
test_title_gnr[i:i + 1])
testi = test_song * test_tag * test_title_genre * test_GEN
cand_song = tra_song_sp_T.dot(testi)
cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]
cand_song_idx = cand_song_idx[np.isin(cand_song_idx,
songs_already) == False][:100]
cand_tag = tra_tag_sp_T.dot(testi)
cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]
cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==
False][:10]
rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]
res2.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,
'tags': rec_tag_idx})
elif len(data['songs']) != 0:
p = np.zeros((707989, 1))
p[data['songs']] = 1
tra_song = data_all.iloc[model_knn_song.kneighbors(p.T)[1][0]]
row = np.repeat(range(50), tra_song['num_songs'])
col = [song for songs in tra_song['songs'] for song in songs]
dat = np.repeat(1, tra_song['num_songs'].sum())
tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_songs)
)
tra_song_sp_T = tra_song_sp.T.tocsr()
row = np.repeat(range(50), tra_song['num_tags'])
col = [tag for tags in tra_song['tags_id'] for tag in tags]
dat = np.repeat(1, tra_song['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
songs_already = data['songs']
tags_already = data['tags_id']
tra_tim = times_sp[model_knn_times.kneighbors(tim_test[i:i + 1]
)[1][0]]
tra_GEN = GEN_sp[model_knn_GEN.kneighbors(GEN_test[i:i + 1])[1][0]]
tra_title_gnr = title_gnr[model_knn_title_gnr.kneighbors(
test_title_gnr[i:i + 1])[1][0]]
test_song = cosine_similarity(tra_song_sp, p.T)
test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])
test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])
test_title_genre = cosine_similarity(tra_title_gnr,
test_title_gnr[i:i + 1])
testi = test_song * test_title_genre * test_tim * test_GEN
cand_song = tra_song_sp_T.dot(testi)
cand_song_idx = cand_song.reshape(-1).argsort()[-200:][::-1]
cand_song_idx = cand_song_idx[np.isin(cand_song_idx,
songs_already) == False][:100]
cand_tag = tra_tag_sp_T.dot(testi)
cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]
cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==
False][:10]
rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]
res2.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,
'tags': rec_tag_idx})
elif len(data['tags_id']) != 0:
p = np.zeros((n_tags, 1))
p[data['tags_id']] = 1
tra_tag = data_all.iloc[model_knn_tag.kneighbors(p.T)[1][0]]
row = np.repeat(range(50), tra_tag['num_songs'])
col = [song for songs in tra_tag['songs'] for song in songs]
dat = np.repeat(1, tra_tag['num_songs'].sum())
tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_songs)
)
tra_song_sp_T = tra_song_sp.T.tocsr()
row = np.repeat(range(50), tra_tag['num_tags'])
col = [tag for tags in tra_tag['tags_id'] for tag in tags]
dat = np.repeat(1, tra_tag['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
songs_already = data['songs']
tags_already = data['tags_id']
        testi = cosine_similarity(tra_tag_sp, p.T)  # fix: p is the tag one-hot here ('pp' is stale in this branch)
if len(data['plylst_title']) != 0:
tra_title_gnr = title_tdm[model_knn_title.kneighbors(
title_ts[i:i + 1])[1][0]]
testi_title = cosine_similarity(tra_title_gnr, title_ts[i:i +
1])
testi = testi * testi_title
cand_song = tra_song_sp_T.dot(testi)
cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]
cand_song_idx = cand_song_idx[np.isin(cand_song_idx,
songs_already) == False][:100]
cand_tag = tra_tag_sp_T.dot(testi)
cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]
cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==
False][:10]
rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]
res2.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,
'tags': rec_tag_idx})
else:
cand_song = []
for li in data_all.iloc[model_knn_title.kneighbors(title_ts[i:i +
1])[1][0]].songs.to_list():
for j in li:
cand_song.append(j)
cand_tag = []
for li in data_all.iloc[model_knn_title.kneighbors(title_ts[i:i +
1])[1][0]].tags.to_list():
for j in li:
cand_tag.append(j)
cand_song_idx = list(pd.DataFrame(cand_song)[0].value_counts()[
:100].index)
rec_tag_idx = list(pd.DataFrame(cand_tag)[0].value_counts()[:10
].index)
res2.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,
'tags': rec_tag_idx})
pd.DataFrame(res2)
rec2 = []
for i in range(len(res2)):
rec2.append({'id': res2[i]['id'], 'songs': list(res2[i]['songs']),
'tags': res2[i]['tags']})
result2 = pd.DataFrame(rec2)['songs']
n_index = [10498, 6361, 1960, 8705, 9310]
result2.index = n_index
result1.loc[n_index, 'songs'] = result2
result1['songs'].apply(len).sort_values()
s = []
for song in train.songs.tolist():
s += song
r1 = dict(Counter(s))
r_song = sorted(r1.items(), key=lambda x: -x[1])
r_song_top = r_song[:100]
list_song = list(dict(r_song_top).keys())
len(list_song)
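# list_song holds the 100 most frequent training songs; it is used below as a
# popularity fallback for playlist 6361, which still has no candidates.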
sub = []
for j in range(len(result1)):
sub.append(result1.loc[j].to_dict())
sub[6361]['songs'] = list_song
pd.DataFrame(sub)['songs'].apply(len).sort_values()
write_json(sub, 'final_songs.json')
return sub
if __name__ == '__main__':
_data = Dataset()
pre_tag.run(_data.test, _data.n_songs, _data.n_tags, _data.spr_list,
_data.tag_tid_id)
final_tags = word2vec_for_tag.run(_data.total, _data.test)
final_songs = song_inference()
result = []
for f_songs, f_tags in zip(final_songs, final_tags):
result.append({'id': f_songs['id'], 'songs': f_songs['songs'],
'tags': f_tags['tags']})
write_json(result, 'results.json')
from datetime import timedelta, datetime
import glob
import json
import os
import re
import pickle
import time
import pandas as pd
import numpy as np
from collections import Counter
from sentencepiece import SentencePieceTrainer
from sentencepiece import SentencePieceProcessor
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.metrics.pairwise import cosine_similarity
from scipy.sparse import vstack
from scipy import sparse
import scipy.sparse as spr
from util import write_json, makeSentencepieceModel
from tqdm import tqdm_notebook
from sklearn.neighbors import NearestNeighbors
from Dataset import Dataset
import pre_tag, word2vec_for_tag
def song_inference():
sp_total_model_path = "sp_total"
train = pd.read_json('./dataset/train.json', typ = 'frame',encoding='utf-8')
song = pd.read_json('./dataset/song_meta.json', typ = 'frame',encoding='utf-8')
plylst_tag = train['tags']
tag_counter = Counter([tg for tgs in plylst_tag for tg in tgs])
tag_dict = {x: tag_counter[x] for x in tag_counter}
tag_id_tid = dict()
tag_tid_id = dict()
for i, t in enumerate(tag_dict):
tag_id_tid[t] = i
tag_tid_id[i] = t
n_tags = len(tag_dict)
plylst_song = train['songs']
song_dict = {x: x for x in song['id']}
n_songs = len(song_dict)
train['tags_id'] = train['tags'].map(lambda x: [tag_id_tid.get(t) for t in x if tag_id_tid.get(t) != None])
    # Collect each playlist's detailed genre codes (song_gn_dtl_gnr_basket).
song_cate = []
for i in range(len(train)):
gnr = []
songs = train.iloc[i,3]
for j in songs:
for k in song.loc[j,'song_gn_dtl_gnr_basket']:
gnr.append(k)
song_cate.append(gnr)
train['plylst_genre'] = song_cate
plylst_genre = train['plylst_genre']
genre_counter = Counter([gen for genre in plylst_genre for gen in genre])
genre_dict = {x: genre_counter[x] for x in genre_counter}
genre_id_tid = dict()
genre_tid_id = dict()
for i, t in enumerate(genre_dict):
genre_id_tid[t] = i
genre_tid_id[i] = t
n_genre = len(genre_dict)
train['plylst_genre_id'] = train['plylst_genre'].map(lambda x: [genre_id_tid.get(s) for s in x if genre_id_tid.get(s) != None])
gnr_array = np.zeros((len(train),n_genre))
for i,index in enumerate(train.index):
if i%10000 == 0:
print(i)
counter = Counter(train.loc[index]['plylst_genre_id'])
for (k,c) in counter.items():
gnr_array[i][k] = c
gnr_array.shape
song['issue_date'] = song['issue_date'].astype('str').map(lambda x : x[:6])
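    # e.g. '20090716' -> '200907': songs are bucketed by issue year-month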
plylst_use = train[['plylst_title','updt_date','tags_id','songs']]
plylst_use.loc[:,'num_songs'] = plylst_use['songs'].map(len)
plylst_use.loc[:,'num_tags'] = plylst_use['tags_id'].map(len)
plylst_train = plylst_use
n_train = len(plylst_train)
    row = np.repeat(range(n_train), plylst_train['num_songs']) # repeat each playlist index once per song it contains
    col = [song for songs in plylst_train['songs'] for song in songs] # flattened song ids
    dat = np.repeat(1, plylst_train['num_songs'].sum()) # a 1 for every (playlist, song) pair
    train_user_songs_A = spr.csr_matrix((dat, (row, col)), shape=(n_train, n_songs)) # build the playlist-song csr matrix
row = np.repeat(range(n_train), plylst_train['num_tags'])
col = [tag for tags in plylst_train['tags_id'] for tag in tags]
dat = np.repeat(1, plylst_train['num_tags'].sum())
train_user_tags_A = spr.csr_matrix((dat, (row, col)), shape=(n_train, n_tags))
    train_user_songs_A_T = train_user_songs_A.T.tocsr() # rows: songs, columns: playlists
    train_user_tags_A_T = train_user_tags_A.T.tocsr() # rows: tags, columns: playlists
val = pd.read_json('./dataset/val.json', typ = 'frame',encoding='utf-8')
song_cate = []
for i in range(len(val)):
gnr = []
songs = val.iloc[i,3]
for j in songs:
for k in song.loc[j,'song_gn_dtl_gnr_basket']:
gnr.append(k)
song_cate.append(gnr)
val['plylst_genre'] = song_cate
val['tags_id'] = val['tags'].map(lambda x: [tag_id_tid.get(t) for t in x if tag_id_tid.get(t) != None])
val['plylst_genre_id'] = val['plylst_genre'].map(lambda x: [genre_id_tid.get(s) for s in x if genre_id_tid.get(s) != None])
val.loc[:,'num_songs'] = val['songs'].map(len)
val.loc[:,'num_tags'] = val['tags_id'].map(len)
# val_title = cv.transform(val['plylst_title']).toarray()
gnr_val = np.zeros((len(val),n_genre))
for i,index in enumerate(val.index):
if i%10000 == 0:
print(i)
counter = Counter(val.loc[index]['plylst_genre_id'])
for (k,c) in counter.items():
gnr_val[i][k] = c
gnr_val.shape
n_val = len(val)
    row = np.repeat(range(n_val), val['num_songs']) # same construction as for train
    col = [song for songs in val['songs'] for song in songs]
    dat = np.repeat(1, val['num_songs'].sum())
    val_user_songs_A = spr.csr_matrix((dat, (row, col)), shape=(n_val, n_songs))
row = np.repeat(range(n_val), val['num_tags'])
col = [tag for tags in val['tags_id'] for tag in tags]
dat = np.repeat(1, val['num_tags'].sum())
val_user_tags_A = spr.csr_matrix((dat, (row, col)), shape=(n_val, n_tags))
val_user_songs_A_T = val_user_songs_A.T.tocsr()
val_user_tags_A_T = val_user_tags_A.T.tocsr()
test = pd.read_json('./dataset/test.json', typ = 'frame',encoding='utf-8')
song_cate = []
for i in range(len(test)):
gnr = []
songs = test.iloc[i,3]
for j in songs:
for k in song.loc[j,'song_gn_dtl_gnr_basket']:
gnr.append(k)
song_cate.append(gnr)
test['plylst_genre'] = song_cate
test['tags_id'] = test['tags'].map(lambda x: [tag_id_tid.get(t) for t in x if tag_id_tid.get(t) != None])
test['plylst_genre_id'] = test['plylst_genre'].map(lambda x: [genre_id_tid.get(s) for s in x if genre_id_tid.get(s) != None])
test.loc[:,'num_songs'] = test['songs'].map(len)
test.loc[:,'num_tags'] = test['tags_id'].map(len)
# test_title = cv.transform(test['plylst_title']).toarray()
gnr_test = np.zeros((len(test),n_genre))
for i,index in enumerate(test.index):
if i%10000 == 0:
print(i)
counter = Counter(test.loc[index]['plylst_genre_id'])
for (k,c) in counter.items():
gnr_test[i][k] = c
gnr_test.shape
n_test = len(test)
    row = np.repeat(range(n_test), test['num_songs']) # same construction as for train
    col = [song for songs in test['songs'] for song in songs]
    dat = np.repeat(1, test['num_songs'].sum())
    test_user_songs_A = spr.csr_matrix((dat, (row, col)), shape=(n_test, n_songs))
row = np.repeat(range(n_test), test['num_tags'])
col = [tag for tags in test['tags_id'] for tag in tags]
dat = np.repeat(1, test['num_tags'].sum())
test_user_tags_A = spr.csr_matrix((dat, (row, col)), shape=(n_test, n_tags))
test_user_songs_A_T = test_user_songs_A.T.tocsr()
test_user_tags_A_T = test_user_tags_A.T.tocsr()
data_all = pd.concat([train,val,test])
data_all.index = range(len(data_all))
arts = song['artist_id_basket'].map(lambda x : x[0])
arts = pd.DataFrame(arts)
art_counts = arts['artist_id_basket'].value_counts().reset_index()
art_counts.columns = ['artist_id_basket','counts']
arts2 = pd.merge(arts,art_counts,how='left',on=['artist_id_basket'])
song_art = song.iloc[arts2.query('counts >= 12')['artist_id_basket'].index]
song_art = song_art[['artist_id_basket']]
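    # Keep only songs whose primary artist is credited on >= 12 songs, so the
    # artist feature ignores one-off artists.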
    # Artist feature: primary artist per song, kept only for frequent artists.
ART_cate = []
for i in tqdm_notebook(range(len(data_all))):
ART = []
songs = data_all.loc[i,'songs']
for j in songs:
if j in song_art.index :
for k in song_art.loc[j,'artist_id_basket'] :
ART.append(k)
ART_cate.append(ART)
data_all['plylst_ARTIST'] = ART_cate
plylst_ARTIST = data_all['plylst_ARTIST']
ARTIST_counter = Counter([ART for ARTIST in plylst_ARTIST for ART in ARTIST])
ARTIST_dict = {x: ARTIST_counter[x] for x in ARTIST_counter}
ARTIST_id_tid = dict()
ARTIST_tid_id = dict()
for i, t in enumerate(ARTIST_dict):
ARTIST_id_tid[t] = i
ARTIST_tid_id[i] = t
n_ARTIST = len(ARTIST_dict)
data_all['plylst_ARTIST_id'] = data_all['plylst_ARTIST'].map(lambda x: [ARTIST_id_tid.get(s) for s in x if ARTIST_id_tid.get(s) != None])
ART_data_all = np.zeros((len(data_all),n_ARTIST))
for i,index in enumerate(data_all.index):
if i%10000 == 0:
print(i)
counter = Counter(data_all.loc[index]['plylst_ARTIST_id'])
for (k,c) in counter.items():
ART_data_all[i][k] = c
ART_data_all.shape
ART_array = ART_data_all[:len(train)]
ART_val = ART_data_all[len(train):len(train)+len(val)]
ART_test = ART_data_all[len(train)+len(val):len(train)+len(val)+len(test)]
# ART_data_all = sparse.csr_matrix(ART_data_all)
del ART_data_all
ART_array = sparse.csr_matrix(ART_array)
ART_val = sparse.csr_matrix(ART_val)
ART_test = sparse.csr_matrix(ART_test)
    # Issue-date feature: collect each playlist's song issue dates (year-month).
tim_cate = []
for i in tqdm_notebook(range(len(data_all))):
tim = []
songs = data_all.loc[i,'songs']
for j in songs:
tim.append(song.loc[j,'issue_date'])
tim_cate.append(tim)
data_all['plylst_times'] = tim_cate
plylst_times = data_all['plylst_times']
times_counter = Counter([tim for times in plylst_times for tim in times])
times_dict = {x: times_counter[x] for x in times_counter}
times_id_tid = dict()
times_tid_id = dict()
for i, t in enumerate(times_dict):
times_id_tid[t] = i
times_tid_id[i] = t
n_times = len(times_dict)
data_all['plylst_times_id'] = data_all['plylst_times'].map(lambda x: [times_id_tid.get(s) for s in x if times_id_tid.get(s) != None])
tim_data_all = np.zeros((len(data_all),n_times))
for i,index in enumerate(data_all.index):
if i%10000 == 0:
print(i)
counter = Counter(data_all.loc[index]['plylst_times_id'])
for (k,c) in counter.items():
tim_data_all[i][k] = c
tim_array = tim_data_all[:len(train)]
tim_val = tim_data_all[len(train):len(train)+len(val)]
tim_test = tim_data_all[len(train)+len(val):len(train)+len(val)+len(test)]
# tim_data_all = sparse.csr_matrix(tim_data_all)
del tim_data_all
tim_array = sparse.csr_matrix(tim_array)
tim_val = sparse.csr_matrix(tim_val)
tim_test = sparse.csr_matrix(tim_test)
    # Top-level genre feature (song_gn_gnr_basket).
GEN_cate = []
for i in tqdm_notebook(range(len(data_all))):
GEN = []
songs = data_all.loc[i,'songs']
for j in songs:
for k in song.loc[j,'song_gn_gnr_basket'] :
GEN.append(k)
GEN_cate.append(GEN)
data_all['plylst_GENRE'] = GEN_cate
plylst_GENRE = data_all['plylst_GENRE']
GENRE_counter = Counter([GEN for GENRE in plylst_GENRE for GEN in GENRE])
GENRE_dict = {x: GENRE_counter[x] for x in GENRE_counter}
GENRE_id_tid = dict()
GENRE_tid_id = dict()
for i, t in enumerate(GENRE_dict):
GENRE_id_tid[t] = i
GENRE_tid_id[i] = t
n_GENRE = len(GENRE_dict)
data_all['plylst_GENRE_id'] = data_all['plylst_GENRE'].map(lambda x: [GENRE_id_tid.get(s) for s in x if GENRE_id_tid.get(s) != None])
GEN_data_all = np.zeros((len(data_all),n_GENRE))
for i,index in enumerate(data_all.index):
if i%10000 == 0:
print(i)
counter = Counter(data_all.loc[index]['plylst_GENRE_id'])
for (k,c) in counter.items():
GEN_data_all[i][k] = c
GEN_array = GEN_data_all[:len(train)]
GEN_val = GEN_data_all[len(train):len(train)+len(val)]
GEN_test = GEN_data_all[len(train)+len(val):len(train)+len(val)+len(test)]
# GEN_data_all = sparse.csr_matrix(GEN_data_all)
del GEN_data_all
GEN_array = sparse.csr_matrix(GEN_array)
GEN_val = sparse.csr_matrix(GEN_val)
GEN_test = sparse.csr_matrix(GEN_test)
content = data_all['plylst_title']
if "{}.model".format(sp_total_model_path) not in os.listdir():
makeSentencepieceModel(data_all,sp_total_model_path)
sp = SentencePieceProcessor()
sp.Load("{}.model".format(sp_total_model_path))
cv = CountVectorizer(max_features=3000, tokenizer=sp.encode_as_pieces)
content = data_all['plylst_title']
tdm = cv.fit_transform(content)
title_tdm = tdm.toarray()
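    # Playlist titles are tokenized with the SentencePiece model and counted
    # into a 3000-dim term-document matrix. A minimal sketch (illustrative
    # only; the output pieces depend on the trained model):
    #   sp.encode_as_pieces('기분 좋은 발라드')  # -> subword pieces of the title
    #   cv.transform(['기분 좋은 발라드']).toarray()  # 1 x 3000 count vector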
title_tr = title_tdm[:len(train)]
title_va = title_tdm[len(train):len(train)+len(val)]
title_ts = title_tdm[len(train)+len(val):len(train)+len(val)+len(test)]
title_gnr = np.concatenate((gnr_array,title_tr),axis=1)
val_title_gnr = np.concatenate((gnr_val,title_va),axis=1)
test_title_gnr = np.concatenate((gnr_test,title_ts),axis=1)
title_sp = sparse.csr_matrix(title_tdm)
title_gnr = sparse.csr_matrix(title_gnr)
val_title_gnr = sparse.csr_matrix(val_title_gnr)
test_title_gnr = sparse.csr_matrix(test_title_gnr)
title_gnr = vstack([title_gnr,val_title_gnr,test_title_gnr])
song_sp = vstack([train_user_songs_A,val_user_songs_A,test_user_songs_A])
tag_sp = vstack([train_user_tags_A,val_user_tags_A,test_user_tags_A])
times_sp = vstack([tim_array,tim_val,tim_test])
GEN_sp = vstack([GEN_array,GEN_val,GEN_test])
ART_sp = vstack([ART_array,ART_val,ART_test])
# song_sp_T = song_sp.T.tocsr()
# tag_sp_T = tag_sp.T.tocsr()
model_knn_song25 = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=25, n_jobs=-1)
model_knn_tag25 = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=25, n_jobs=-1)
model_knn_title25 = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=25, n_jobs=-1)
model_knn_title_gnr25 = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=25, n_jobs=-1)
model_knn_times25 = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=25, n_jobs=-1)
model_knn_GEN25 = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=25, n_jobs=-1)
model_knn_ART25 = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=25, n_jobs=-1)
model_knn_song40 = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=40, n_jobs=-1)
model_knn_tag40 = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=40, n_jobs=-1)
model_knn_title40 = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=40, n_jobs=-1)
model_knn_title_gnr40 = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=40, n_jobs=-1)
model_knn_times40 = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=40, n_jobs=-1)
model_knn_GEN40 = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=40, n_jobs=-1)
model_knn_ART40 = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=40, n_jobs=-1)
model_knn_song25.fit(song_sp)
model_knn_tag25.fit(tag_sp)
model_knn_title25.fit(title_sp)
model_knn_title_gnr25.fit(title_gnr)
model_knn_times25.fit(times_sp)
model_knn_GEN25.fit(GEN_sp)
model_knn_ART25.fit(ART_sp)
model_knn_song40.fit(song_sp)
model_knn_tag40.fit(tag_sp)
model_knn_title40.fit(title_sp)
model_knn_title_gnr40.fit(title_gnr)
model_knn_times40.fit(times_sp)
model_knn_GEN40.fit(GEN_sp)
model_knn_ART40.fit(ART_sp)
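    # In the scoring loops below, the cosine similarities from the separate
    # feature spaces (songs, tags, title+genre, issue date, genre, artist)
    # are multiplied into a single weight, so a neighbor must match on every
    # view to keep a high score (a soft AND over the views).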
train.loc[:,'num_songs'] = train['songs'].map(len)
train.loc[:,'num_tags'] = train['tags_id'].map(len)
data_all = pd.concat([train,val,test])
data_all.index = range(len(data_all))
res = []
for i in tqdm_notebook(range(len(test))):
data = test.iloc[i]
pid = i
if len(data['songs']) >= 2 and len(data['tags_id']) >=2 :
p = np.zeros((707989,1))
p[data['songs']] = 1
pp = np.zeros((n_tags,1))
pp[data['tags_id']] = 1
tra_song = data_all.iloc[model_knn_song25.kneighbors(p.T)[1][0]]
            row = np.repeat(range(25), tra_song['num_songs']) # one row per neighbor playlist
            col = [song for songs in tra_song['songs'] for song in songs]
            dat = np.repeat(1, tra_song['num_songs'].sum())
            tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_songs)) # 25 neighbors x songs
tra_song_sp_T = tra_song_sp.T.tocsr()
tra_tag = data_all.iloc[model_knn_tag25.kneighbors(pp.T)[1][0]]
row = np.repeat(range(25), tra_tag['num_tags'])
col = [tag for tags in tra_tag['tags_id'] for tag in tags]
dat = np.repeat(1, tra_tag['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
tra_tim = times_sp[model_knn_times25.kneighbors(tim_test[i:(i+1)])[1][0]]
tra_GEN = GEN_sp[model_knn_GEN25.kneighbors(GEN_test[i:(i+1)])[1][0]]
tra_ART = ART_sp[model_knn_ART25.kneighbors(ART_test[i:(i+1)])[1][0]]
tra_title_gnr = title_gnr[model_knn_title_gnr25.kneighbors(test_title_gnr[i:(i+1)])[1][0]]
songs_already = data["songs"]
tags_already = data["tags_id"]
test_song = cosine_similarity(tra_song_sp,p.T)
test_tag = cosine_similarity(tra_tag_sp,pp.T)
test_tim = cosine_similarity(tra_tim,tim_test[i:(i+1)])
test_GEN = cosine_similarity(tra_GEN,GEN_test[i:(i+1)])
test_ART = cosine_similarity(tra_ART,ART_test[i:(i+1)])
test_title_genre = cosine_similarity(tra_title_gnr,test_title_gnr[i:(i+1)])
testi = test_song * test_tag * test_title_genre * test_tim * test_GEN * test_ART
            cand_song = tra_song_sp_T.dot(testi) # (songs x neighbors) @ neighbor similarities -> high scores for songs of similar playlists
            cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1] # take the 300 highest-scoring songs
            cand_song_idx = cand_song_idx[np.isin(cand_song_idx, songs_already) == False] # drop songs already in the playlist
cand1 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()
####### 40 ####################################################
tra_song = data_all.iloc[model_knn_song40.kneighbors(p.T)[1][0]]
            row = np.repeat(range(40), tra_song['num_songs']) # same as above, now with 40 neighbors
            col = [song for songs in tra_song['songs'] for song in songs]
            dat = np.repeat(1, tra_song['num_songs'].sum())
            tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_songs))
tra_song_sp_T = tra_song_sp.T.tocsr()
tra_tag = data_all.iloc[model_knn_tag40.kneighbors(pp.T)[1][0]]
row = np.repeat(range(40), tra_tag['num_tags'])
col = [tag for tags in tra_tag['tags_id'] for tag in tags]
dat = np.repeat(1, tra_tag['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
tra_tim = times_sp[model_knn_times40.kneighbors(tim_test[i:(i+1)])[1][0]]
tra_GEN = GEN_sp[model_knn_GEN40.kneighbors(GEN_test[i:(i+1)])[1][0]]
tra_ART = ART_sp[model_knn_ART40.kneighbors(ART_test[i:(i+1)])[1][0]]
tra_title_gnr = title_gnr[model_knn_title_gnr40.kneighbors(test_title_gnr[i:(i+1)])[1][0]]
test_song = cosine_similarity(tra_song_sp,p.T)
test_tag = cosine_similarity(tra_tag_sp,pp.T)
test_tim = cosine_similarity(tra_tim,tim_test[i:(i+1)])
test_GEN = cosine_similarity(tra_GEN,GEN_test[i:(i+1)])
test_ART = cosine_similarity(tra_ART,ART_test[i:(i+1)])
test_title_genre = cosine_similarity(tra_title_gnr,test_title_gnr[i:(i+1)])
testi = test_song * test_tag * test_title_genre * test_tim * test_GEN * test_ART
            cand_song = tra_song_sp_T.dot(testi) # score songs against the 40-neighbor pass
            cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1] # take the 300 highest-scoring songs
            cand_song_idx = cand_song_idx[np.isin(cand_song_idx, songs_already) == False] # drop songs already in the playlist
cand2 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()
cand_all = pd.merge(cand1,cand2,how='outer',on='index')
cand_all = cand_all.fillna(0)
cand_all['pred'] = (cand_all['0_x'] + cand_all['0_y'])/2
cand_song_idx = list(cand_all.sort_values(by=['pred'],ascending=False)[:100]['index'])
######tag######
            cand_tag = tra_tag_sp_T.dot(testi) # same procedure for tags
cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]
cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) == False][:10]
rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]
res.append({
"id": test.loc[pid,'id'],
"songs": cand_song_idx,
"tags": rec_tag_idx
})
elif len(data['songs']) != 0:
p = np.zeros((707989,1))
p[data['songs']] = 1
tra_song = data_all.iloc[model_knn_song25.kneighbors(p.T)[1][0]]
            row = np.repeat(range(25), tra_song['num_songs']) # same construction as in the first branch
            col = [song for songs in tra_song['songs'] for song in songs]
            dat = np.repeat(1, tra_song['num_songs'].sum())
            tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_songs))
tra_song_sp_T = tra_song_sp.T.tocsr()
# tra_tag = data_all.iloc[model_knn_tag25.kneighbors(pp.T)[1][0]]
row = np.repeat(range(25), tra_song['num_tags'])
col = [tag for tags in tra_song['tags_id'] for tag in tags]
dat = np.repeat(1, tra_song['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
tra_tim = times_sp[model_knn_times25.kneighbors(tim_test[i:(i+1)])[1][0]]
tra_GEN = GEN_sp[model_knn_GEN25.kneighbors(GEN_test[i:(i+1)])[1][0]]
tra_ART = ART_sp[model_knn_ART25.kneighbors(ART_test[i:(i+1)])[1][0]]
tra_title_gnr = title_gnr[model_knn_title_gnr25.kneighbors(test_title_gnr[i:(i+1)])[1][0]]
songs_already = data["songs"]
tags_already = data["tags_id"]
test_song = cosine_similarity(tra_song_sp,p.T)
test_tim = cosine_similarity(tra_tim,tim_test[i:(i+1)])
test_GEN = cosine_similarity(tra_GEN,GEN_test[i:(i+1)])
test_ART = cosine_similarity(tra_ART,ART_test[i:(i+1)])
test_title_genre = cosine_similarity(tra_title_gnr,test_title_gnr[i:(i+1)])
testi = test_song*test_title_genre*test_tim*test_GEN * test_ART
            cand_song = tra_song_sp_T.dot(testi) # score songs against the 25-neighbor pass
            cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1] # take the 300 highest-scoring songs
            cand_song_idx = cand_song_idx[np.isin(cand_song_idx, songs_already) == False] # drop songs already in the playlist
cand1 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()
####### 40 ####################################################
tra_song = data_all.iloc[model_knn_song40.kneighbors(p.T)[1][0]]
            row = np.repeat(range(40), tra_song['num_songs']) # same as above, now with 40 neighbors
            col = [song for songs in tra_song['songs'] for song in songs]
            dat = np.repeat(1, tra_song['num_songs'].sum())
            tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_songs))
tra_song_sp_T = tra_song_sp.T.tocsr()
row = np.repeat(range(40), tra_song['num_tags'])
col = [tag for tags in tra_song['tags_id'] for tag in tags]
dat = np.repeat(1, tra_song['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
tra_tim = times_sp[model_knn_times40.kneighbors(tim_test[i:(i+1)])[1][0]]
tra_GEN = GEN_sp[model_knn_GEN40.kneighbors(GEN_test[i:(i+1)])[1][0]]
tra_ART = ART_sp[model_knn_ART40.kneighbors(ART_test[i:(i+1)])[1][0]]
tra_title_gnr = title_gnr[model_knn_title_gnr40.kneighbors(test_title_gnr[i:(i+1)])[1][0]]
test_song = cosine_similarity(tra_song_sp,p.T)
test_tim = cosine_similarity(tra_tim,tim_test[i:(i+1)])
test_GEN = cosine_similarity(tra_GEN,GEN_test[i:(i+1)])
test_ART = cosine_similarity(tra_ART,ART_test[i:(i+1)])
test_title_genre = cosine_similarity(tra_title_gnr,test_title_gnr[i:(i+1)])
testi = test_song * test_title_genre * test_tim * test_GEN * test_ART
            cand_song = tra_song_sp_T.dot(testi) # score songs against the 40-neighbor pass
            cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1] # take the 300 highest-scoring songs
            cand_song_idx = cand_song_idx[np.isin(cand_song_idx, songs_already) == False] # drop songs already in the playlist
cand2 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()
cand_all = pd.merge(cand1,cand2,how='outer',on='index')
cand_all = cand_all.fillna(0)
cand_all['pred'] = (cand_all['0_x'] + cand_all['0_y'])/2
cand_song_idx = list(cand_all.sort_values(by=['pred'],ascending=False)[:100]['index'])
#######tag########
            cand_tag = tra_tag_sp_T.dot(testi) # same procedure for tags
cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]
cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) == False][:10]
rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]
res.append({
"id": test.loc[pid,'id'],
"songs": cand_song_idx,
"tags": rec_tag_idx
})
elif len(data['tags_id']) !=0:
p = np.zeros((n_tags,1))
p[data['tags_id']] = 1
tra_tag = data_all.iloc[model_knn_tag25.kneighbors(p.T)[1][0]]
            row = np.repeat(range(25), tra_tag['num_songs']) # neighbors found via shared tags
            col = [song for songs in tra_tag['songs'] for song in songs]
            dat = np.repeat(1, tra_tag['num_songs'].sum())
            tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_songs))
tra_song_sp_T = tra_song_sp.T.tocsr()
row = np.repeat(range(25), tra_tag['num_tags'])
col = [tag for tags in tra_tag['tags_id'] for tag in tags]
dat = np.repeat(1, tra_tag['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
songs_already = data["songs"]
tags_already = data["tags_id"]
            testi = cosine_similarity(tra_tag_sp, p.T) # fix: p is the tag one-hot here ('pp' was a stale name from another branch)
if len(data['plylst_title']) != 0 :
tra_title_gnr = title_tdm[model_knn_title25.kneighbors(title_ts[i:(i+1)])[1][0]]
testi_title = cosine_similarity(tra_title_gnr,title_ts[i:(i+1)])
testi = testi * testi_title
            cand_song = tra_song_sp_T.dot(testi) # score songs via the tag neighbors
            cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1] # take the 300 highest-scoring songs
            cand_song_idx = cand_song_idx[np.isin(cand_song_idx, songs_already) == False][:100] # drop known songs, keep 100
            cand_tag = tra_tag_sp_T.dot(testi) # same procedure for tags
cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]
cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) == False][:10]
rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]
res.append({
"id": test.loc[pid,'id'],
"songs": list(cand_song_idx),
"tags": rec_tag_idx
})
else :
cand_song = []
for li in data_all.iloc[model_knn_title25.kneighbors(title_ts[i:(i+1)])[1][0]].songs.to_list():
for j in li:
cand_song.append(j)
cand_tag = []
for li in data_all.iloc[model_knn_title25.kneighbors(title_ts[i:(i+1)])[1][0]].tags.to_list():
for j in li:
cand_tag.append(j)
cand_song_idx = list(pd.DataFrame(cand_song)[0].value_counts()[:100].index)
rec_tag_idx = list(pd.DataFrame(cand_tag)[0].value_counts()[:10].index)
res.append({
"id": test.loc[pid,'id'],
"songs": cand_song_idx,
"tags": rec_tag_idx
})
for i in range(len(res)):
if len(res[i]['songs']) != 100:
            print('songs: error at entry {} (candidate count != 100)'.format(i))
if len(res[i]['tags']) != 10:
            print('tags: error at entry {} (candidate count != 10)'.format(i))
rec = []
for i in range(len(res)):
rec.append({
"id": res[i]['id'],
"songs": list(res[i]['songs']),
"tags": res[i]['tags']
})
result1 = pd.DataFrame(rec)
model_knn_song = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=50, n_jobs=-1)
model_knn_tag = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=50, n_jobs=-1)
model_knn_title = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=50, n_jobs=-1)
model_knn_title_gnr = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=50, n_jobs=-1)
model_knn_times = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=50, n_jobs=-1)
model_knn_GEN = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=50, n_jobs=-1)
model_knn_ART = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=50, n_jobs=-1)
model_knn_song.fit(song_sp)
model_knn_tag.fit(tag_sp)
model_knn_title.fit(title_sp)
model_knn_title_gnr.fit(title_gnr)
model_knn_times.fit(times_sp)
model_knn_GEN.fit(GEN_sp)
model_knn_ART.fit(ART_sp)
res2 = []
for i in tqdm_notebook([1960, 6361, 8705, 9310, 10498]):
data = test.iloc[i]
pid = i
if len(data['songs']) != 0 and len(data['tags_id']) != 0:
p = np.zeros((707989,1))
p[data['songs']] = 1
pp = np.zeros((n_tags,1))
pp[data['tags_id']] = 1
tra_song = data_all.iloc[model_knn_song.kneighbors(p.T)[1][0]]
            row = np.repeat(range(50), tra_song['num_songs']) # 50 neighbors in this pass
            col = [song for songs in tra_song['songs'] for song in songs]
            dat = np.repeat(1, tra_song['num_songs'].sum())
            tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_songs))
tra_song_sp_T = tra_song_sp.T.tocsr()
tra_tag = data_all.iloc[model_knn_tag.kneighbors(pp.T)[1][0]]
row = np.repeat(range(50), tra_tag['num_tags'])
col = [tag for tags in tra_tag['tags_id'] for tag in tags]
dat = np.repeat(1, tra_tag['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
tra_tim = times_sp[model_knn_times.kneighbors(tim_test[i:(i+1)])[1][0]]
tra_GEN = GEN_sp[model_knn_GEN.kneighbors(GEN_test[i:(i+1)])[1][0]]
tra_title_gnr = title_gnr[model_knn_title_gnr.kneighbors(test_title_gnr[i:(i+1)])[1][0]]
songs_already = data["songs"]
tags_already = data["tags_id"]
test_song = cosine_similarity(tra_song_sp,p.T)
test_tag = cosine_similarity(tra_tag_sp,pp.T)
test_tim = cosine_similarity(tra_tim,tim_test[i:(i+1)])
test_GEN = cosine_similarity(tra_GEN,GEN_test[i:(i+1)])
test_title_genre = cosine_similarity(tra_title_gnr,test_title_gnr[i:(i+1)])
            testi = test_song * test_tag * test_title_genre * test_GEN # note: test_tim is computed above but not used in this pass
            cand_song = tra_song_sp_T.dot(testi) # score songs against the 50 neighbors
            cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1] # take the 300 highest-scoring songs
            cand_song_idx = cand_song_idx[np.isin(cand_song_idx, songs_already) == False][:100] # drop known songs, keep 100
            cand_tag = tra_tag_sp_T.dot(testi) # same procedure for tags
cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]
cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) == False][:10]
rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]
res2.append({
"id": test.loc[pid,'id'],
"songs": cand_song_idx,
"tags": rec_tag_idx
})
elif len(data['songs']) != 0:
p = np.zeros((707989,1))
p[data['songs']] = 1
tra_song = data_all.iloc[model_knn_song.kneighbors(p.T)[1][0]]
            row = np.repeat(range(50), tra_song['num_songs']) # same construction, 50 neighbors
            col = [song for songs in tra_song['songs'] for song in songs]
            dat = np.repeat(1, tra_song['num_songs'].sum())
            tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_songs))
tra_song_sp_T = tra_song_sp.T.tocsr()
row = np.repeat(range(50), tra_song['num_tags'])
col = [tag for tags in tra_song['tags_id'] for tag in tags]
dat = np.repeat(1, tra_song['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
songs_already = data["songs"]
tags_already = data["tags_id"]
tra_tim = times_sp[model_knn_times.kneighbors(tim_test[i:(i+1)])[1][0]]
tra_GEN = GEN_sp[model_knn_GEN.kneighbors(GEN_test[i:(i+1)])[1][0]]
tra_title_gnr = title_gnr[model_knn_title_gnr.kneighbors(test_title_gnr[i:(i+1)])[1][0]]
test_song = cosine_similarity(tra_song_sp,p.T)
test_tim = cosine_similarity(tra_tim,tim_test[i:(i+1)])
test_GEN = cosine_similarity(tra_GEN,GEN_test[i:(i+1)])
test_title_genre = cosine_similarity(tra_title_gnr,test_title_gnr[i:(i+1)])
testi = test_song*test_title_genre*test_tim*test_GEN
            cand_song = tra_song_sp_T.dot(testi) # score songs against the 50 neighbors
            cand_song_idx = cand_song.reshape(-1).argsort()[-200:][::-1] # take the 200 highest-scoring songs
            cand_song_idx = cand_song_idx[np.isin(cand_song_idx, songs_already) == False][:100] # drop known songs, keep 100
            cand_tag = tra_tag_sp_T.dot(testi) # same procedure for tags
cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]
cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) == False][:10]
rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]
res2.append({
"id": test.loc[pid,'id'],
"songs": cand_song_idx,
"tags": rec_tag_idx
})
elif len(data['tags_id']) !=0:
p = np.zeros((n_tags,1))
p[data['tags_id']] = 1
tra_tag = data_all.iloc[model_knn_tag.kneighbors(p.T)[1][0]]
            row = np.repeat(range(50), tra_tag['num_songs']) # neighbors found via shared tags
            col = [song for songs in tra_tag['songs'] for song in songs]
            dat = np.repeat(1, tra_tag['num_songs'].sum())
            tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_songs))
tra_song_sp_T = tra_song_sp.T.tocsr()
row = np.repeat(range(50), tra_tag['num_tags'])
col = [tag for tags in tra_tag['tags_id'] for tag in tags]
dat = np.repeat(1, tra_tag['num_tags'].sum())
tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_tags))
tra_tag_sp_T = tra_tag_sp.T.tocsr()
songs_already = data["songs"]
tags_already = data["tags_id"]
            testi = cosine_similarity(tra_tag_sp, p.T) # fix: p is the tag one-hot here ('pp' was a stale name from another branch)
if len(data['plylst_title']) != 0 :
tra_title_gnr = title_tdm[model_knn_title.kneighbors(title_ts[i:(i+1)])[1][0]]
testi_title = cosine_similarity(tra_title_gnr,title_ts[i:(i+1)])
testi = testi * testi_title
            cand_song = tra_song_sp_T.dot(testi) # score songs via the tag neighbors
            cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1] # take the 300 highest-scoring songs
            cand_song_idx = cand_song_idx[np.isin(cand_song_idx, songs_already) == False][:100] # drop known songs, keep 100
            cand_tag = tra_tag_sp_T.dot(testi) # same procedure for tags
cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]
cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) == False][:10]
rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]
res2.append({
"id": test.loc[pid,'id'],
"songs": cand_song_idx,
"tags": rec_tag_idx
})
else:
cand_song = []
for li in data_all.iloc[model_knn_title.kneighbors(title_ts[i:(i+1)])[1][0]].songs.to_list():
for j in li:
cand_song.append(j)
cand_tag = []
for li in data_all.iloc[model_knn_title.kneighbors(title_ts[i:(i+1)])[1][0]].tags.to_list():
for j in li:
cand_tag.append(j)
cand_song_idx = list(pd.DataFrame(cand_song)[0].value_counts()[:100].index)
rec_tag_idx = list(pd.DataFrame(cand_tag)[0].value_counts()[:10].index)
res2.append({
"id": test.loc[pid,'id'],
"songs": cand_song_idx,
"tags": rec_tag_idx
})
pd.DataFrame(res2)
rec2 = []
for i in range(len(res2)):
rec2.append({
"id": res2[i]['id'],
"songs": list(res2[i]['songs']),
"tags": res2[i]['tags']
})
result2 = pd.DataFrame(rec2)['songs']
n_index = [10498,6361,1960,8705,9310]
result2.index = n_index
result1.loc[n_index,'songs'] = result2
result1['songs'].apply(len).sort_values()
    # Playlist 6361 still ends up without candidates, so recommend the overall top-100 songs.
s = []
for song in train.songs.tolist():
s += song
r1 = dict(Counter(s))
r_song = sorted(r1.items(), key=lambda x: -x[1])
    r_song_top = r_song[:100] # number of fallback songs (top 100)
list_song = list(dict(r_song_top).keys())
len(list_song)
sub= []
for j in range(len(result1)) :
sub.append(result1.loc[j].to_dict())
sub[6361]['songs'] = list_song
pd.DataFrame(sub)['songs'].apply(len).sort_values()
write_json(sub,'final_songs.json')
return sub
if __name__ == '__main__':
    _data = Dataset()
    pre_tag.run(_data.test, _data.n_songs, _data.n_tags, _data.spr_list, _data.tag_tid_id)
    final_tags = word2vec_for_tag.run(_data.total, _data.test)
    final_songs = song_inference()
    result = []
    for f_songs, f_tags in zip(final_songs, final_tags):
        result.append({
            'id': f_songs['id'],
            'songs': f_songs['songs'],
            'tags': f_tags['tags']
        })
    write_json(result, 'results.json')
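    # Final output, one entry per test playlist: songs from the kNN model above,
    # tags from the word2vec model, i.e. {'id': ..., 'songs': [100 song ids], 'tags': [10 tags]}.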
|
flexible
|
{
"blob_id": "05573b4ff68ca8638f8e13946b410df2a012840a",
"index": 1829,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef song_inference():\n sp_total_model_path = 'sp_total'\n train = pd.read_json('./dataset/train.json', typ='frame', encoding='utf-8')\n song = pd.read_json('./dataset/song_meta.json', typ='frame', encoding=\n 'utf-8')\n plylst_tag = train['tags']\n tag_counter = Counter([tg for tgs in plylst_tag for tg in tgs])\n tag_dict = {x: tag_counter[x] for x in tag_counter}\n tag_id_tid = dict()\n tag_tid_id = dict()\n for i, t in enumerate(tag_dict):\n tag_id_tid[t] = i\n tag_tid_id[i] = t\n n_tags = len(tag_dict)\n plylst_song = train['songs']\n song_dict = {x: x for x in song['id']}\n n_songs = len(song_dict)\n train['tags_id'] = train['tags'].map(lambda x: [tag_id_tid.get(t) for t in\n x if tag_id_tid.get(t) != None])\n song_cate = []\n for i in range(len(train)):\n gnr = []\n songs = train.iloc[i, 3]\n for j in songs:\n for k in song.loc[j, 'song_gn_dtl_gnr_basket']:\n gnr.append(k)\n song_cate.append(gnr)\n train['plylst_genre'] = song_cate\n plylst_genre = train['plylst_genre']\n genre_counter = Counter([gen for genre in plylst_genre for gen in genre])\n genre_dict = {x: genre_counter[x] for x in genre_counter}\n genre_id_tid = dict()\n genre_tid_id = dict()\n for i, t in enumerate(genre_dict):\n genre_id_tid[t] = i\n genre_tid_id[i] = t\n n_genre = len(genre_dict)\n train['plylst_genre_id'] = train['plylst_genre'].map(lambda x: [\n genre_id_tid.get(s) for s in x if genre_id_tid.get(s) != None])\n gnr_array = np.zeros((len(train), n_genre))\n for i, index in enumerate(train.index):\n if i % 10000 == 0:\n print(i)\n counter = Counter(train.loc[index]['plylst_genre_id'])\n for k, c in counter.items():\n gnr_array[i][k] = c\n gnr_array.shape\n song['issue_date'] = song['issue_date'].astype('str').map(lambda x: x[:6])\n plylst_use = train[['plylst_title', 'updt_date', 'tags_id', 'songs']]\n plylst_use.loc[:, 'num_songs'] = plylst_use['songs'].map(len)\n plylst_use.loc[:, 'num_tags'] = plylst_use['tags_id'].map(len)\n plylst_train = plylst_use\n n_train = len(plylst_train)\n row = np.repeat(range(n_train), plylst_train['num_songs'])\n col = [song for songs in plylst_train['songs'] for song in songs]\n dat = np.repeat(1, plylst_train['num_songs'].sum())\n train_user_songs_A = spr.csr_matrix((dat, (row, col)), shape=(n_train,\n n_songs))\n row = np.repeat(range(n_train), plylst_train['num_tags'])\n col = [tag for tags in plylst_train['tags_id'] for tag in tags]\n dat = np.repeat(1, plylst_train['num_tags'].sum())\n train_user_tags_A = spr.csr_matrix((dat, (row, col)), shape=(n_train,\n n_tags))\n train_user_songs_A_T = train_user_songs_A.T.tocsr()\n train_user_songs_A_T\n train_user_tags_A_T = train_user_tags_A.T.tocsr()\n train_user_tags_A_T\n val = pd.read_json('./dataset/val.json', typ='frame', encoding='utf-8')\n song_cate = []\n for i in range(len(val)):\n gnr = []\n songs = val.iloc[i, 3]\n for j in songs:\n for k in song.loc[j, 'song_gn_dtl_gnr_basket']:\n gnr.append(k)\n song_cate.append(gnr)\n val['plylst_genre'] = song_cate\n val['tags_id'] = val['tags'].map(lambda x: [tag_id_tid.get(t) for t in\n x if tag_id_tid.get(t) != None])\n val['plylst_genre_id'] = val['plylst_genre'].map(lambda x: [\n genre_id_tid.get(s) for s in x if genre_id_tid.get(s) != None])\n val.loc[:, 'num_songs'] = val['songs'].map(len)\n val.loc[:, 'num_tags'] = val['tags_id'].map(len)\n gnr_val = np.zeros((len(val), n_genre))\n for i, index in enumerate(val.index):\n if i % 10000 == 0:\n print(i)\n counter = Counter(val.loc[index]['plylst_genre_id'])\n for k, c in counter.items():\n 
gnr_val[i][k] = c\n gnr_val.shape\n n_val = len(val)\n row = np.repeat(range(n_val), val['num_songs'])\n col = [song for songs in val['songs'] for song in songs]\n dat = np.repeat(1, val['num_songs'].sum())\n val_user_songs_A = spr.csr_matrix((dat, (row, col)), shape=(n_val, n_songs)\n )\n row = np.repeat(range(n_val), val['num_tags'])\n col = [tag for tags in val['tags_id'] for tag in tags]\n dat = np.repeat(1, val['num_tags'].sum())\n val_user_tags_A = spr.csr_matrix((dat, (row, col)), shape=(n_val, n_tags))\n val_user_songs_A_T = val_user_songs_A.T.tocsr()\n val_user_tags_A_T = val_user_tags_A.T.tocsr()\n test = pd.read_json('./dataset/test.json', typ='frame', encoding='utf-8')\n song_cate = []\n for i in range(len(test)):\n gnr = []\n songs = test.iloc[i, 3]\n for j in songs:\n for k in song.loc[j, 'song_gn_dtl_gnr_basket']:\n gnr.append(k)\n song_cate.append(gnr)\n test['plylst_genre'] = song_cate\n test['tags_id'] = test['tags'].map(lambda x: [tag_id_tid.get(t) for t in\n x if tag_id_tid.get(t) != None])\n test['plylst_genre_id'] = test['plylst_genre'].map(lambda x: [\n genre_id_tid.get(s) for s in x if genre_id_tid.get(s) != None])\n test.loc[:, 'num_songs'] = test['songs'].map(len)\n test.loc[:, 'num_tags'] = test['tags_id'].map(len)\n gnr_test = np.zeros((len(test), n_genre))\n for i, index in enumerate(test.index):\n if i % 10000 == 0:\n print(i)\n counter = Counter(test.loc[index]['plylst_genre_id'])\n for k, c in counter.items():\n gnr_test[i][k] = c\n gnr_test.shape\n n_test = len(test)\n row = np.repeat(range(n_test), test['num_songs'])\n col = [song for songs in test['songs'] for song in songs]\n dat = np.repeat(1, test['num_songs'].sum())\n test_user_songs_A = spr.csr_matrix((dat, (row, col)), shape=(n_test,\n n_songs))\n row = np.repeat(range(n_test), test['num_tags'])\n col = [tag for tags in test['tags_id'] for tag in tags]\n dat = np.repeat(1, test['num_tags'].sum())\n test_user_tags_A = spr.csr_matrix((dat, (row, col)), shape=(n_test, n_tags)\n )\n test_user_songs_A_T = test_user_songs_A.T.tocsr()\n test_user_tags_A_T = test_user_tags_A.T.tocsr()\n data_all = pd.concat([train, val, test])\n data_all.index = range(len(data_all))\n arts = song['artist_id_basket'].map(lambda x: x[0])\n arts = pd.DataFrame(arts)\n art_counts = arts['artist_id_basket'].value_counts().reset_index()\n art_counts.columns = ['artist_id_basket', 'counts']\n arts2 = pd.merge(arts, art_counts, how='left', on=['artist_id_basket'])\n song_art = song.iloc[arts2.query('counts >= 12')['artist_id_basket'].index]\n song_art = song_art[['artist_id_basket']]\n ART_cate = []\n for i in tqdm_notebook(range(len(data_all))):\n ART = []\n songs = data_all.loc[i, 'songs']\n for j in songs:\n if j in song_art.index:\n for k in song_art.loc[j, 'artist_id_basket']:\n ART.append(k)\n ART_cate.append(ART)\n data_all['plylst_ARTIST'] = ART_cate\n plylst_ARTIST = data_all['plylst_ARTIST']\n ARTIST_counter = Counter([ART for ARTIST in plylst_ARTIST for ART in\n ARTIST])\n ARTIST_dict = {x: ARTIST_counter[x] for x in ARTIST_counter}\n ARTIST_id_tid = dict()\n ARTIST_tid_id = dict()\n for i, t in enumerate(ARTIST_dict):\n ARTIST_id_tid[t] = i\n ARTIST_tid_id[i] = t\n n_ARTIST = len(ARTIST_dict)\n data_all['plylst_ARTIST_id'] = data_all['plylst_ARTIST'].map(lambda x:\n [ARTIST_id_tid.get(s) for s in x if ARTIST_id_tid.get(s) != None])\n ART_data_all = np.zeros((len(data_all), n_ARTIST))\n for i, index in enumerate(data_all.index):\n if i % 10000 == 0:\n print(i)\n counter = 
Counter(data_all.loc[index]['plylst_ARTIST_id'])\n for k, c in counter.items():\n ART_data_all[i][k] = c\n ART_data_all.shape\n ART_array = ART_data_all[:len(train)]\n ART_val = ART_data_all[len(train):len(train) + len(val)]\n ART_test = ART_data_all[len(train) + len(val):len(train) + len(val) +\n len(test)]\n del ART_data_all\n ART_array = sparse.csr_matrix(ART_array)\n ART_val = sparse.csr_matrix(ART_val)\n ART_test = sparse.csr_matrix(ART_test)\n tim_cate = []\n for i in tqdm_notebook(range(len(data_all))):\n tim = []\n songs = data_all.loc[i, 'songs']\n for j in songs:\n tim.append(song.loc[j, 'issue_date'])\n tim_cate.append(tim)\n data_all['plylst_times'] = tim_cate\n plylst_times = data_all['plylst_times']\n times_counter = Counter([tim for times in plylst_times for tim in times])\n times_dict = {x: times_counter[x] for x in times_counter}\n times_id_tid = dict()\n times_tid_id = dict()\n for i, t in enumerate(times_dict):\n times_id_tid[t] = i\n times_tid_id[i] = t\n n_times = len(times_dict)\n data_all['plylst_times_id'] = data_all['plylst_times'].map(lambda x: [\n times_id_tid.get(s) for s in x if times_id_tid.get(s) != None])\n tim_data_all = np.zeros((len(data_all), n_times))\n for i, index in enumerate(data_all.index):\n if i % 10000 == 0:\n print(i)\n counter = Counter(data_all.loc[index]['plylst_times_id'])\n for k, c in counter.items():\n tim_data_all[i][k] = c\n tim_array = tim_data_all[:len(train)]\n tim_val = tim_data_all[len(train):len(train) + len(val)]\n tim_test = tim_data_all[len(train) + len(val):len(train) + len(val) +\n len(test)]\n del tim_data_all\n tim_array = sparse.csr_matrix(tim_array)\n tim_val = sparse.csr_matrix(tim_val)\n tim_test = sparse.csr_matrix(tim_test)\n GEN_cate = []\n for i in tqdm_notebook(range(len(data_all))):\n GEN = []\n songs = data_all.loc[i, 'songs']\n for j in songs:\n for k in song.loc[j, 'song_gn_gnr_basket']:\n GEN.append(k)\n GEN_cate.append(GEN)\n data_all['plylst_GENRE'] = GEN_cate\n plylst_GENRE = data_all['plylst_GENRE']\n GENRE_counter = Counter([GEN for GENRE in plylst_GENRE for GEN in GENRE])\n GENRE_dict = {x: GENRE_counter[x] for x in GENRE_counter}\n GENRE_id_tid = dict()\n GENRE_tid_id = dict()\n for i, t in enumerate(GENRE_dict):\n GENRE_id_tid[t] = i\n GENRE_tid_id[i] = t\n n_GENRE = len(GENRE_dict)\n data_all['plylst_GENRE_id'] = data_all['plylst_GENRE'].map(lambda x: [\n GENRE_id_tid.get(s) for s in x if GENRE_id_tid.get(s) != None])\n GEN_data_all = np.zeros((len(data_all), n_GENRE))\n for i, index in enumerate(data_all.index):\n if i % 10000 == 0:\n print(i)\n counter = Counter(data_all.loc[index]['plylst_GENRE_id'])\n for k, c in counter.items():\n GEN_data_all[i][k] = c\n GEN_array = GEN_data_all[:len(train)]\n GEN_val = GEN_data_all[len(train):len(train) + len(val)]\n GEN_test = GEN_data_all[len(train) + len(val):len(train) + len(val) +\n len(test)]\n del GEN_data_all\n GEN_array = sparse.csr_matrix(GEN_array)\n GEN_val = sparse.csr_matrix(GEN_val)\n GEN_test = sparse.csr_matrix(GEN_test)\n content = data_all['plylst_title']\n if '{}.model'.format(sp_total_model_path) not in os.listdir():\n makeSentencepieceModel(data_all, sp_total_model_path)\n sp = SentencePieceProcessor()\n sp.Load('{}.model'.format(sp_total_model_path))\n cv = CountVectorizer(max_features=3000, tokenizer=sp.encode_as_pieces)\n content = data_all['plylst_title']\n tdm = cv.fit_transform(content)\n title_tdm = tdm.toarray()\n title_tr = title_tdm[:len(train)]\n title_va = title_tdm[len(train):len(train) + len(val)]\n title_ts = 
title_tdm[len(train) + len(val):len(train) + len(val) + len(\n test)]\n title_gnr = np.concatenate((gnr_array, title_tr), axis=1)\n val_title_gnr = np.concatenate((gnr_val, title_va), axis=1)\n test_title_gnr = np.concatenate((gnr_test, title_ts), axis=1)\n title_sp = sparse.csr_matrix(title_tdm)\n title_gnr = sparse.csr_matrix(title_gnr)\n val_title_gnr = sparse.csr_matrix(val_title_gnr)\n test_title_gnr = sparse.csr_matrix(test_title_gnr)\n title_gnr = vstack([title_gnr, val_title_gnr, test_title_gnr])\n song_sp = vstack([train_user_songs_A, val_user_songs_A, test_user_songs_A])\n tag_sp = vstack([train_user_tags_A, val_user_tags_A, test_user_tags_A])\n times_sp = vstack([tim_array, tim_val, tim_test])\n GEN_sp = vstack([GEN_array, GEN_val, GEN_test])\n ART_sp = vstack([ART_array, ART_val, ART_test])\n model_knn_song25 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=25, n_jobs=-1)\n model_knn_tag25 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=25, n_jobs=-1)\n model_knn_title25 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=25, n_jobs=-1)\n model_knn_title_gnr25 = NearestNeighbors(metric='cosine', algorithm=\n 'brute', n_neighbors=25, n_jobs=-1)\n model_knn_times25 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=25, n_jobs=-1)\n model_knn_GEN25 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=25, n_jobs=-1)\n model_knn_ART25 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=25, n_jobs=-1)\n model_knn_song40 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=40, n_jobs=-1)\n model_knn_tag40 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=40, n_jobs=-1)\n model_knn_title40 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=40, n_jobs=-1)\n model_knn_title_gnr40 = NearestNeighbors(metric='cosine', algorithm=\n 'brute', n_neighbors=40, n_jobs=-1)\n model_knn_times40 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=40, n_jobs=-1)\n model_knn_GEN40 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=40, n_jobs=-1)\n model_knn_ART40 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=40, n_jobs=-1)\n model_knn_song25.fit(song_sp)\n model_knn_tag25.fit(tag_sp)\n model_knn_title25.fit(title_sp)\n model_knn_title_gnr25.fit(title_gnr)\n model_knn_times25.fit(times_sp)\n model_knn_GEN25.fit(GEN_sp)\n model_knn_ART25.fit(ART_sp)\n model_knn_song40.fit(song_sp)\n model_knn_tag40.fit(tag_sp)\n model_knn_title40.fit(title_sp)\n model_knn_title_gnr40.fit(title_gnr)\n model_knn_times40.fit(times_sp)\n model_knn_GEN40.fit(GEN_sp)\n model_knn_ART40.fit(ART_sp)\n train.loc[:, 'num_songs'] = train['songs'].map(len)\n train.loc[:, 'num_tags'] = train['tags_id'].map(len)\n data_all = pd.concat([train, val, test])\n data_all.index = range(len(data_all))\n res = []\n for i in tqdm_notebook(range(len(test))):\n data = test.iloc[i]\n pid = i\n if len(data['songs']) >= 2 and len(data['tags_id']) >= 2:\n p = np.zeros((707989, 1))\n p[data['songs']] = 1\n pp = np.zeros((n_tags, 1))\n pp[data['tags_id']] = 1\n tra_song = data_all.iloc[model_knn_song25.kneighbors(p.T)[1][0]]\n row = np.repeat(range(25), tra_song['num_songs'])\n col = [song for songs in tra_song['songs'] for song in songs]\n dat = np.repeat(1, tra_song['num_songs'].sum())\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_songs)\n )\n tra_song_sp_T = tra_song_sp.T.tocsr()\n tra_tag = 
data_all.iloc[model_knn_tag25.kneighbors(pp.T)[1][0]]\n row = np.repeat(range(25), tra_tag['num_tags'])\n col = [tag for tags in tra_tag['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_tag['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n tra_tim = times_sp[model_knn_times25.kneighbors(tim_test[i:i + \n 1])[1][0]]\n tra_GEN = GEN_sp[model_knn_GEN25.kneighbors(GEN_test[i:i + 1])[\n 1][0]]\n tra_ART = ART_sp[model_knn_ART25.kneighbors(ART_test[i:i + 1])[\n 1][0]]\n tra_title_gnr = title_gnr[model_knn_title_gnr25.kneighbors(\n test_title_gnr[i:i + 1])[1][0]]\n songs_already = data['songs']\n tags_already = data['tags_id']\n test_song = cosine_similarity(tra_song_sp, p.T)\n test_tag = cosine_similarity(tra_tag_sp, pp.T)\n test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])\n test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])\n test_ART = cosine_similarity(tra_ART, ART_test[i:i + 1])\n test_title_genre = cosine_similarity(tra_title_gnr,\n test_title_gnr[i:i + 1])\n testi = (test_song * test_tag * test_title_genre * test_tim *\n test_GEN * test_ART)\n cand_song = tra_song_sp_T.dot(testi)\n cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx,\n songs_already) == False]\n cand1 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()\n tra_song = data_all.iloc[model_knn_song40.kneighbors(p.T)[1][0]]\n row = np.repeat(range(40), tra_song['num_songs'])\n col = [song for songs in tra_song['songs'] for song in songs]\n dat = np.repeat(1, tra_song['num_songs'].sum())\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_songs)\n )\n tra_song_sp_T = tra_song_sp.T.tocsr()\n tra_tag = data_all.iloc[model_knn_tag40.kneighbors(pp.T)[1][0]]\n row = np.repeat(range(40), tra_tag['num_tags'])\n col = [tag for tags in tra_tag['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_tag['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n tra_tim = times_sp[model_knn_times40.kneighbors(tim_test[i:i + \n 1])[1][0]]\n tra_GEN = GEN_sp[model_knn_GEN40.kneighbors(GEN_test[i:i + 1])[\n 1][0]]\n tra_ART = ART_sp[model_knn_ART40.kneighbors(ART_test[i:i + 1])[\n 1][0]]\n tra_title_gnr = title_gnr[model_knn_title_gnr40.kneighbors(\n test_title_gnr[i:i + 1])[1][0]]\n test_song = cosine_similarity(tra_song_sp, p.T)\n test_tag = cosine_similarity(tra_tag_sp, pp.T)\n test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])\n test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])\n test_ART = cosine_similarity(tra_ART, ART_test[i:i + 1])\n test_title_genre = cosine_similarity(tra_title_gnr,\n test_title_gnr[i:i + 1])\n testi = (test_song * test_tag * test_title_genre * test_tim *\n test_GEN * test_ART)\n cand_song = tra_song_sp_T.dot(testi)\n cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx,\n songs_already) == False]\n cand2 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()\n cand_all = pd.merge(cand1, cand2, how='outer', on='index')\n cand_all = cand_all.fillna(0)\n cand_all['pred'] = (cand_all['0_x'] + cand_all['0_y']) / 2\n cand_song_idx = list(cand_all.sort_values(by=['pred'],\n ascending=False)[:100]['index'])\n cand_tag = tra_tag_sp_T.dot(testi)\n cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]\n cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==\n False][:10]\n rec_tag_idx = 
[tag_tid_id[i] for i in cand_tag_idx]\n res.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,\n 'tags': rec_tag_idx})\n elif len(data['songs']) != 0:\n p = np.zeros((707989, 1))\n p[data['songs']] = 1\n tra_song = data_all.iloc[model_knn_song25.kneighbors(p.T)[1][0]]\n row = np.repeat(range(25), tra_song['num_songs'])\n col = [song for songs in tra_song['songs'] for song in songs]\n dat = np.repeat(1, tra_song['num_songs'].sum())\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_songs)\n )\n tra_song_sp_T = tra_song_sp.T.tocsr()\n row = np.repeat(range(25), tra_song['num_tags'])\n col = [tag for tags in tra_song['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_song['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n tra_tim = times_sp[model_knn_times25.kneighbors(tim_test[i:i + \n 1])[1][0]]\n tra_GEN = GEN_sp[model_knn_GEN25.kneighbors(GEN_test[i:i + 1])[\n 1][0]]\n tra_ART = ART_sp[model_knn_ART25.kneighbors(ART_test[i:i + 1])[\n 1][0]]\n tra_title_gnr = title_gnr[model_knn_title_gnr25.kneighbors(\n test_title_gnr[i:i + 1])[1][0]]\n songs_already = data['songs']\n tags_already = data['tags_id']\n test_song = cosine_similarity(tra_song_sp, p.T)\n test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])\n test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])\n test_ART = cosine_similarity(tra_ART, ART_test[i:i + 1])\n test_title_genre = cosine_similarity(tra_title_gnr,\n test_title_gnr[i:i + 1])\n testi = (test_song * test_title_genre * test_tim * test_GEN *\n test_ART)\n cand_song = tra_song_sp_T.dot(testi)\n cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx,\n songs_already) == False]\n cand1 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()\n tra_song = data_all.iloc[model_knn_song40.kneighbors(p.T)[1][0]]\n row = np.repeat(range(40), tra_song['num_songs'])\n col = [song for songs in tra_song['songs'] for song in songs]\n dat = np.repeat(1, tra_song['num_songs'].sum())\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_songs)\n )\n tra_song_sp_T = tra_song_sp.T.tocsr()\n row = np.repeat(range(40), tra_song['num_tags'])\n col = [tag for tags in tra_song['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_song['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n tra_tim = times_sp[model_knn_times40.kneighbors(tim_test[i:i + \n 1])[1][0]]\n tra_GEN = GEN_sp[model_knn_GEN40.kneighbors(GEN_test[i:i + 1])[\n 1][0]]\n tra_ART = ART_sp[model_knn_ART40.kneighbors(ART_test[i:i + 1])[\n 1][0]]\n tra_title_gnr = title_gnr[model_knn_title_gnr40.kneighbors(\n test_title_gnr[i:i + 1])[1][0]]\n test_song = cosine_similarity(tra_song_sp, p.T)\n test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])\n test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])\n test_ART = cosine_similarity(tra_ART, ART_test[i:i + 1])\n test_title_genre = cosine_similarity(tra_title_gnr,\n test_title_gnr[i:i + 1])\n testi = (test_song * test_title_genre * test_tim * test_GEN *\n test_ART)\n cand_song = tra_song_sp_T.dot(testi)\n cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx,\n songs_already) == False]\n cand2 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()\n cand_all = pd.merge(cand1, cand2, how='outer', on='index')\n cand_all = cand_all.fillna(0)\n cand_all['pred'] = 
(cand_all['0_x'] + cand_all['0_y']) / 2\n cand_song_idx = list(cand_all.sort_values(by=['pred'],\n ascending=False)[:100]['index'])\n cand_tag = tra_tag_sp_T.dot(testi)\n cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]\n cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==\n False][:10]\n rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]\n res.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,\n 'tags': rec_tag_idx})\n elif len(data['tags_id']) != 0:\n p = np.zeros((n_tags, 1))\n p[data['tags_id']] = 1\n tra_tag = data_all.iloc[model_knn_tag25.kneighbors(p.T)[1][0]]\n row = np.repeat(range(25), tra_tag['num_songs'])\n col = [song for songs in tra_tag['songs'] for song in songs]\n dat = np.repeat(1, tra_tag['num_songs'].sum())\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_songs)\n )\n tra_song_sp_T = tra_song_sp.T.tocsr()\n row = np.repeat(range(25), tra_tag['num_tags'])\n col = [tag for tags in tra_tag['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_tag['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n songs_already = data['songs']\n tags_already = data['tags_id']\n testi = cosine_similarity(tra_tag_sp, pp.T)\n if len(data['plylst_title']) != 0:\n tra_title_gnr = title_tdm[model_knn_title25.kneighbors(\n title_ts[i:i + 1])[1][0]]\n testi_title = cosine_similarity(tra_title_gnr, title_ts[i:i +\n 1])\n testi = testi * testi_title\n cand_song = tra_song_sp_T.dot(testi)\n cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx,\n songs_already) == False][:100]\n cand_tag = tra_tag_sp_T.dot(testi)\n cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]\n cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==\n False][:10]\n rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]\n res.append({'id': test.loc[pid, 'id'], 'songs': list(\n cand_song_idx), 'tags': rec_tag_idx})\n else:\n cand_song = []\n for li in data_all.iloc[model_knn_title25.kneighbors(title_ts[i\n :i + 1])[1][0]].songs.to_list():\n for j in li:\n cand_song.append(j)\n cand_tag = []\n for li in data_all.iloc[model_knn_title25.kneighbors(title_ts[i\n :i + 1])[1][0]].tags.to_list():\n for j in li:\n cand_tag.append(j)\n cand_song_idx = list(pd.DataFrame(cand_song)[0].value_counts()[\n :100].index)\n rec_tag_idx = list(pd.DataFrame(cand_tag)[0].value_counts()[:10\n ].index)\n res.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,\n 'tags': rec_tag_idx})\n for i in range(len(res)):\n if len(res[i]['songs']) != 100:\n print('song 에서 {}번째 오류 발생'.format(i))\n if len(res[i]['tags']) != 10:\n print('tag 에서 {}번째 오류 발생'.format(i))\n rec = []\n for i in range(len(res)):\n rec.append({'id': res[i]['id'], 'songs': list(res[i]['songs']),\n 'tags': res[i]['tags']})\n result1 = pd.DataFrame(rec)\n model_knn_song = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=50, n_jobs=-1)\n model_knn_tag = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=50, n_jobs=-1)\n model_knn_title = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=50, n_jobs=-1)\n model_knn_title_gnr = NearestNeighbors(metric='cosine', algorithm=\n 'brute', n_neighbors=50, n_jobs=-1)\n model_knn_times = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=50, n_jobs=-1)\n model_knn_GEN = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=50, n_jobs=-1)\n model_knn_ART = 
NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=50, n_jobs=-1)\n model_knn_song.fit(song_sp)\n model_knn_tag.fit(tag_sp)\n model_knn_title.fit(title_sp)\n model_knn_title_gnr.fit(title_gnr)\n model_knn_times.fit(times_sp)\n model_knn_GEN.fit(GEN_sp)\n model_knn_ART.fit(ART_sp)\n res2 = []\n for i in tqdm_notebook([1960, 6361, 8705, 9310, 10498]):\n data = test.iloc[i]\n pid = i\n if len(data['songs']) != 0 and len(data['tags_id']) != 0:\n p = np.zeros((707989, 1))\n p[data['songs']] = 1\n pp = np.zeros((n_tags, 1))\n pp[data['tags_id']] = 1\n tra_song = data_all.iloc[model_knn_song.kneighbors(p.T)[1][0]]\n row = np.repeat(range(50), tra_song['num_songs'])\n col = [song for songs in tra_song['songs'] for song in songs]\n dat = np.repeat(1, tra_song['num_songs'].sum())\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_songs)\n )\n tra_song_sp_T = tra_song_sp.T.tocsr()\n tra_tag = data_all.iloc[model_knn_tag.kneighbors(pp.T)[1][0]]\n row = np.repeat(range(50), tra_tag['num_tags'])\n col = [tag for tags in tra_tag['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_tag['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n tra_tim = times_sp[model_knn_times.kneighbors(tim_test[i:i + 1]\n )[1][0]]\n tra_GEN = GEN_sp[model_knn_GEN.kneighbors(GEN_test[i:i + 1])[1][0]]\n tra_title_gnr = title_gnr[model_knn_title_gnr.kneighbors(\n test_title_gnr[i:i + 1])[1][0]]\n songs_already = data['songs']\n tags_already = data['tags_id']\n test_song = cosine_similarity(tra_song_sp, p.T)\n test_tag = cosine_similarity(tra_tag_sp, pp.T)\n test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])\n test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])\n test_title_genre = cosine_similarity(tra_title_gnr,\n test_title_gnr[i:i + 1])\n testi = test_song * test_tag * test_title_genre * test_GEN\n cand_song = tra_song_sp_T.dot(testi)\n cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx,\n songs_already) == False][:100]\n cand_tag = tra_tag_sp_T.dot(testi)\n cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]\n cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==\n False][:10]\n rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]\n res2.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,\n 'tags': rec_tag_idx})\n elif len(data['songs']) != 0:\n p = np.zeros((707989, 1))\n p[data['songs']] = 1\n tra_song = data_all.iloc[model_knn_song.kneighbors(p.T)[1][0]]\n row = np.repeat(range(50), tra_song['num_songs'])\n col = [song for songs in tra_song['songs'] for song in songs]\n dat = np.repeat(1, tra_song['num_songs'].sum())\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_songs)\n )\n tra_song_sp_T = tra_song_sp.T.tocsr()\n row = np.repeat(range(50), tra_song['num_tags'])\n col = [tag for tags in tra_song['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_song['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n songs_already = data['songs']\n tags_already = data['tags_id']\n tra_tim = times_sp[model_knn_times.kneighbors(tim_test[i:i + 1]\n )[1][0]]\n tra_GEN = GEN_sp[model_knn_GEN.kneighbors(GEN_test[i:i + 1])[1][0]]\n tra_title_gnr = title_gnr[model_knn_title_gnr.kneighbors(\n test_title_gnr[i:i + 1])[1][0]]\n test_song = cosine_similarity(tra_song_sp, p.T)\n test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])\n test_GEN = 
cosine_similarity(tra_GEN, GEN_test[i:i + 1])\n test_title_genre = cosine_similarity(tra_title_gnr,\n test_title_gnr[i:i + 1])\n testi = test_song * test_title_genre * test_tim * test_GEN\n cand_song = tra_song_sp_T.dot(testi)\n cand_song_idx = cand_song.reshape(-1).argsort()[-200:][::-1]\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx,\n songs_already) == False][:100]\n cand_tag = tra_tag_sp_T.dot(testi)\n cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]\n cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==\n False][:10]\n rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]\n res2.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,\n 'tags': rec_tag_idx})\n elif len(data['tags_id']) != 0:\n p = np.zeros((n_tags, 1))\n p[data['tags_id']] = 1\n tra_tag = data_all.iloc[model_knn_tag.kneighbors(p.T)[1][0]]\n row = np.repeat(range(50), tra_tag['num_songs'])\n col = [song for songs in tra_tag['songs'] for song in songs]\n dat = np.repeat(1, tra_tag['num_songs'].sum())\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_songs)\n )\n tra_song_sp_T = tra_song_sp.T.tocsr()\n row = np.repeat(range(50), tra_tag['num_tags'])\n col = [tag for tags in tra_tag['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_tag['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n songs_already = data['songs']\n tags_already = data['tags_id']\n testi = cosine_similarity(tra_tag_sp, pp.T)\n if len(data['plylst_title']) != 0:\n tra_title_gnr = title_tdm[model_knn_title.kneighbors(\n title_ts[i:i + 1])[1][0]]\n testi_title = cosine_similarity(tra_title_gnr, title_ts[i:i +\n 1])\n testi = testi * testi_title\n cand_song = tra_song_sp_T.dot(testi)\n cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx,\n songs_already) == False][:100]\n cand_tag = tra_tag_sp_T.dot(testi)\n cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]\n cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==\n False][:10]\n rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]\n res2.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,\n 'tags': rec_tag_idx})\n else:\n cand_song = []\n for li in data_all.iloc[model_knn_title.kneighbors(title_ts[i:i +\n 1])[1][0]].songs.to_list():\n for j in li:\n cand_song.append(j)\n cand_tag = []\n for li in data_all.iloc[model_knn_title.kneighbors(title_ts[i:i +\n 1])[1][0]].tags.to_list():\n for j in li:\n cand_tag.append(j)\n cand_song_idx = list(pd.DataFrame(cand_song)[0].value_counts()[\n :100].index)\n rec_tag_idx = list(pd.DataFrame(cand_tag)[0].value_counts()[:10\n ].index)\n res2.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,\n 'tags': rec_tag_idx})\n pd.DataFrame(res2)\n rec2 = []\n for i in range(len(res2)):\n rec2.append({'id': res2[i]['id'], 'songs': list(res2[i]['songs']),\n 'tags': res2[i]['tags']})\n result2 = pd.DataFrame(rec2)['songs']\n n_index = [10498, 6361, 1960, 8705, 9310]\n result2.index = n_index\n result1.loc[n_index, 'songs'] = result2\n result1['songs'].apply(len).sort_values()\n s = []\n for song in train.songs.tolist():\n s += song\n r1 = dict(Counter(s))\n r_song = sorted(r1.items(), key=lambda x: -x[1])\n r_song_top = r_song[:100]\n list_song = list(dict(r_song_top).keys())\n len(list_song)\n sub = []\n for j in range(len(result1)):\n sub.append(result1.loc[j].to_dict())\n sub[6361]['songs'] = list_song\n pd.DataFrame(sub)['songs'].apply(len).sort_values()\n 
write_json(sub, 'final_songs.json')\n return sub\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef song_inference():\n sp_total_model_path = 'sp_total'\n train = pd.read_json('./dataset/train.json', typ='frame', encoding='utf-8')\n song = pd.read_json('./dataset/song_meta.json', typ='frame', encoding=\n 'utf-8')\n plylst_tag = train['tags']\n tag_counter = Counter([tg for tgs in plylst_tag for tg in tgs])\n tag_dict = {x: tag_counter[x] for x in tag_counter}\n tag_id_tid = dict()\n tag_tid_id = dict()\n for i, t in enumerate(tag_dict):\n tag_id_tid[t] = i\n tag_tid_id[i] = t\n n_tags = len(tag_dict)\n plylst_song = train['songs']\n song_dict = {x: x for x in song['id']}\n n_songs = len(song_dict)\n train['tags_id'] = train['tags'].map(lambda x: [tag_id_tid.get(t) for t in\n x if tag_id_tid.get(t) != None])\n song_cate = []\n for i in range(len(train)):\n gnr = []\n songs = train.iloc[i, 3]\n for j in songs:\n for k in song.loc[j, 'song_gn_dtl_gnr_basket']:\n gnr.append(k)\n song_cate.append(gnr)\n train['plylst_genre'] = song_cate\n plylst_genre = train['plylst_genre']\n genre_counter = Counter([gen for genre in plylst_genre for gen in genre])\n genre_dict = {x: genre_counter[x] for x in genre_counter}\n genre_id_tid = dict()\n genre_tid_id = dict()\n for i, t in enumerate(genre_dict):\n genre_id_tid[t] = i\n genre_tid_id[i] = t\n n_genre = len(genre_dict)\n train['plylst_genre_id'] = train['plylst_genre'].map(lambda x: [\n genre_id_tid.get(s) for s in x if genre_id_tid.get(s) != None])\n gnr_array = np.zeros((len(train), n_genre))\n for i, index in enumerate(train.index):\n if i % 10000 == 0:\n print(i)\n counter = Counter(train.loc[index]['plylst_genre_id'])\n for k, c in counter.items():\n gnr_array[i][k] = c\n gnr_array.shape\n song['issue_date'] = song['issue_date'].astype('str').map(lambda x: x[:6])\n plylst_use = train[['plylst_title', 'updt_date', 'tags_id', 'songs']]\n plylst_use.loc[:, 'num_songs'] = plylst_use['songs'].map(len)\n plylst_use.loc[:, 'num_tags'] = plylst_use['tags_id'].map(len)\n plylst_train = plylst_use\n n_train = len(plylst_train)\n row = np.repeat(range(n_train), plylst_train['num_songs'])\n col = [song for songs in plylst_train['songs'] for song in songs]\n dat = np.repeat(1, plylst_train['num_songs'].sum())\n train_user_songs_A = spr.csr_matrix((dat, (row, col)), shape=(n_train,\n n_songs))\n row = np.repeat(range(n_train), plylst_train['num_tags'])\n col = [tag for tags in plylst_train['tags_id'] for tag in tags]\n dat = np.repeat(1, plylst_train['num_tags'].sum())\n train_user_tags_A = spr.csr_matrix((dat, (row, col)), shape=(n_train,\n n_tags))\n train_user_songs_A_T = train_user_songs_A.T.tocsr()\n train_user_songs_A_T\n train_user_tags_A_T = train_user_tags_A.T.tocsr()\n train_user_tags_A_T\n val = pd.read_json('./dataset/val.json', typ='frame', encoding='utf-8')\n song_cate = []\n for i in range(len(val)):\n gnr = []\n songs = val.iloc[i, 3]\n for j in songs:\n for k in song.loc[j, 'song_gn_dtl_gnr_basket']:\n gnr.append(k)\n song_cate.append(gnr)\n val['plylst_genre'] = song_cate\n val['tags_id'] = val['tags'].map(lambda x: [tag_id_tid.get(t) for t in\n x if tag_id_tid.get(t) != None])\n val['plylst_genre_id'] = val['plylst_genre'].map(lambda x: [\n genre_id_tid.get(s) for s in x if genre_id_tid.get(s) != None])\n val.loc[:, 'num_songs'] = val['songs'].map(len)\n val.loc[:, 'num_tags'] = val['tags_id'].map(len)\n gnr_val = np.zeros((len(val), n_genre))\n for i, index in enumerate(val.index):\n if i % 10000 == 0:\n print(i)\n counter = Counter(val.loc[index]['plylst_genre_id'])\n for k, c in counter.items():\n 
gnr_val[i][k] = c\n gnr_val.shape\n n_val = len(val)\n row = np.repeat(range(n_val), val['num_songs'])\n col = [song for songs in val['songs'] for song in songs]\n dat = np.repeat(1, val['num_songs'].sum())\n val_user_songs_A = spr.csr_matrix((dat, (row, col)), shape=(n_val, n_songs)\n )\n row = np.repeat(range(n_val), val['num_tags'])\n col = [tag for tags in val['tags_id'] for tag in tags]\n dat = np.repeat(1, val['num_tags'].sum())\n val_user_tags_A = spr.csr_matrix((dat, (row, col)), shape=(n_val, n_tags))\n val_user_songs_A_T = val_user_songs_A.T.tocsr()\n val_user_tags_A_T = val_user_tags_A.T.tocsr()\n test = pd.read_json('./dataset/test.json', typ='frame', encoding='utf-8')\n song_cate = []\n for i in range(len(test)):\n gnr = []\n songs = test.iloc[i, 3]\n for j in songs:\n for k in song.loc[j, 'song_gn_dtl_gnr_basket']:\n gnr.append(k)\n song_cate.append(gnr)\n test['plylst_genre'] = song_cate\n test['tags_id'] = test['tags'].map(lambda x: [tag_id_tid.get(t) for t in\n x if tag_id_tid.get(t) != None])\n test['plylst_genre_id'] = test['plylst_genre'].map(lambda x: [\n genre_id_tid.get(s) for s in x if genre_id_tid.get(s) != None])\n test.loc[:, 'num_songs'] = test['songs'].map(len)\n test.loc[:, 'num_tags'] = test['tags_id'].map(len)\n gnr_test = np.zeros((len(test), n_genre))\n for i, index in enumerate(test.index):\n if i % 10000 == 0:\n print(i)\n counter = Counter(test.loc[index]['plylst_genre_id'])\n for k, c in counter.items():\n gnr_test[i][k] = c\n gnr_test.shape\n n_test = len(test)\n row = np.repeat(range(n_test), test['num_songs'])\n col = [song for songs in test['songs'] for song in songs]\n dat = np.repeat(1, test['num_songs'].sum())\n test_user_songs_A = spr.csr_matrix((dat, (row, col)), shape=(n_test,\n n_songs))\n row = np.repeat(range(n_test), test['num_tags'])\n col = [tag for tags in test['tags_id'] for tag in tags]\n dat = np.repeat(1, test['num_tags'].sum())\n test_user_tags_A = spr.csr_matrix((dat, (row, col)), shape=(n_test, n_tags)\n )\n test_user_songs_A_T = test_user_songs_A.T.tocsr()\n test_user_tags_A_T = test_user_tags_A.T.tocsr()\n data_all = pd.concat([train, val, test])\n data_all.index = range(len(data_all))\n arts = song['artist_id_basket'].map(lambda x: x[0])\n arts = pd.DataFrame(arts)\n art_counts = arts['artist_id_basket'].value_counts().reset_index()\n art_counts.columns = ['artist_id_basket', 'counts']\n arts2 = pd.merge(arts, art_counts, how='left', on=['artist_id_basket'])\n song_art = song.iloc[arts2.query('counts >= 12')['artist_id_basket'].index]\n song_art = song_art[['artist_id_basket']]\n ART_cate = []\n for i in tqdm_notebook(range(len(data_all))):\n ART = []\n songs = data_all.loc[i, 'songs']\n for j in songs:\n if j in song_art.index:\n for k in song_art.loc[j, 'artist_id_basket']:\n ART.append(k)\n ART_cate.append(ART)\n data_all['plylst_ARTIST'] = ART_cate\n plylst_ARTIST = data_all['plylst_ARTIST']\n ARTIST_counter = Counter([ART for ARTIST in plylst_ARTIST for ART in\n ARTIST])\n ARTIST_dict = {x: ARTIST_counter[x] for x in ARTIST_counter}\n ARTIST_id_tid = dict()\n ARTIST_tid_id = dict()\n for i, t in enumerate(ARTIST_dict):\n ARTIST_id_tid[t] = i\n ARTIST_tid_id[i] = t\n n_ARTIST = len(ARTIST_dict)\n data_all['plylst_ARTIST_id'] = data_all['plylst_ARTIST'].map(lambda x:\n [ARTIST_id_tid.get(s) for s in x if ARTIST_id_tid.get(s) != None])\n ART_data_all = np.zeros((len(data_all), n_ARTIST))\n for i, index in enumerate(data_all.index):\n if i % 10000 == 0:\n print(i)\n counter = 
Counter(data_all.loc[index]['plylst_ARTIST_id'])\n for k, c in counter.items():\n ART_data_all[i][k] = c\n ART_data_all.shape\n ART_array = ART_data_all[:len(train)]\n ART_val = ART_data_all[len(train):len(train) + len(val)]\n ART_test = ART_data_all[len(train) + len(val):len(train) + len(val) +\n len(test)]\n del ART_data_all\n ART_array = sparse.csr_matrix(ART_array)\n ART_val = sparse.csr_matrix(ART_val)\n ART_test = sparse.csr_matrix(ART_test)\n tim_cate = []\n for i in tqdm_notebook(range(len(data_all))):\n tim = []\n songs = data_all.loc[i, 'songs']\n for j in songs:\n tim.append(song.loc[j, 'issue_date'])\n tim_cate.append(tim)\n data_all['plylst_times'] = tim_cate\n plylst_times = data_all['plylst_times']\n times_counter = Counter([tim for times in plylst_times for tim in times])\n times_dict = {x: times_counter[x] for x in times_counter}\n times_id_tid = dict()\n times_tid_id = dict()\n for i, t in enumerate(times_dict):\n times_id_tid[t] = i\n times_tid_id[i] = t\n n_times = len(times_dict)\n data_all['plylst_times_id'] = data_all['plylst_times'].map(lambda x: [\n times_id_tid.get(s) for s in x if times_id_tid.get(s) != None])\n tim_data_all = np.zeros((len(data_all), n_times))\n for i, index in enumerate(data_all.index):\n if i % 10000 == 0:\n print(i)\n counter = Counter(data_all.loc[index]['plylst_times_id'])\n for k, c in counter.items():\n tim_data_all[i][k] = c\n tim_array = tim_data_all[:len(train)]\n tim_val = tim_data_all[len(train):len(train) + len(val)]\n tim_test = tim_data_all[len(train) + len(val):len(train) + len(val) +\n len(test)]\n del tim_data_all\n tim_array = sparse.csr_matrix(tim_array)\n tim_val = sparse.csr_matrix(tim_val)\n tim_test = sparse.csr_matrix(tim_test)\n GEN_cate = []\n for i in tqdm_notebook(range(len(data_all))):\n GEN = []\n songs = data_all.loc[i, 'songs']\n for j in songs:\n for k in song.loc[j, 'song_gn_gnr_basket']:\n GEN.append(k)\n GEN_cate.append(GEN)\n data_all['plylst_GENRE'] = GEN_cate\n plylst_GENRE = data_all['plylst_GENRE']\n GENRE_counter = Counter([GEN for GENRE in plylst_GENRE for GEN in GENRE])\n GENRE_dict = {x: GENRE_counter[x] for x in GENRE_counter}\n GENRE_id_tid = dict()\n GENRE_tid_id = dict()\n for i, t in enumerate(GENRE_dict):\n GENRE_id_tid[t] = i\n GENRE_tid_id[i] = t\n n_GENRE = len(GENRE_dict)\n data_all['plylst_GENRE_id'] = data_all['plylst_GENRE'].map(lambda x: [\n GENRE_id_tid.get(s) for s in x if GENRE_id_tid.get(s) != None])\n GEN_data_all = np.zeros((len(data_all), n_GENRE))\n for i, index in enumerate(data_all.index):\n if i % 10000 == 0:\n print(i)\n counter = Counter(data_all.loc[index]['plylst_GENRE_id'])\n for k, c in counter.items():\n GEN_data_all[i][k] = c\n GEN_array = GEN_data_all[:len(train)]\n GEN_val = GEN_data_all[len(train):len(train) + len(val)]\n GEN_test = GEN_data_all[len(train) + len(val):len(train) + len(val) +\n len(test)]\n del GEN_data_all\n GEN_array = sparse.csr_matrix(GEN_array)\n GEN_val = sparse.csr_matrix(GEN_val)\n GEN_test = sparse.csr_matrix(GEN_test)\n content = data_all['plylst_title']\n if '{}.model'.format(sp_total_model_path) not in os.listdir():\n makeSentencepieceModel(data_all, sp_total_model_path)\n sp = SentencePieceProcessor()\n sp.Load('{}.model'.format(sp_total_model_path))\n cv = CountVectorizer(max_features=3000, tokenizer=sp.encode_as_pieces)\n content = data_all['plylst_title']\n tdm = cv.fit_transform(content)\n title_tdm = tdm.toarray()\n title_tr = title_tdm[:len(train)]\n title_va = title_tdm[len(train):len(train) + len(val)]\n title_ts = 
title_tdm[len(train) + len(val):len(train) + len(val) + len(\n test)]\n title_gnr = np.concatenate((gnr_array, title_tr), axis=1)\n val_title_gnr = np.concatenate((gnr_val, title_va), axis=1)\n test_title_gnr = np.concatenate((gnr_test, title_ts), axis=1)\n title_sp = sparse.csr_matrix(title_tdm)\n title_gnr = sparse.csr_matrix(title_gnr)\n val_title_gnr = sparse.csr_matrix(val_title_gnr)\n test_title_gnr = sparse.csr_matrix(test_title_gnr)\n title_gnr = vstack([title_gnr, val_title_gnr, test_title_gnr])\n song_sp = vstack([train_user_songs_A, val_user_songs_A, test_user_songs_A])\n tag_sp = vstack([train_user_tags_A, val_user_tags_A, test_user_tags_A])\n times_sp = vstack([tim_array, tim_val, tim_test])\n GEN_sp = vstack([GEN_array, GEN_val, GEN_test])\n ART_sp = vstack([ART_array, ART_val, ART_test])\n model_knn_song25 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=25, n_jobs=-1)\n model_knn_tag25 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=25, n_jobs=-1)\n model_knn_title25 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=25, n_jobs=-1)\n model_knn_title_gnr25 = NearestNeighbors(metric='cosine', algorithm=\n 'brute', n_neighbors=25, n_jobs=-1)\n model_knn_times25 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=25, n_jobs=-1)\n model_knn_GEN25 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=25, n_jobs=-1)\n model_knn_ART25 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=25, n_jobs=-1)\n model_knn_song40 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=40, n_jobs=-1)\n model_knn_tag40 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=40, n_jobs=-1)\n model_knn_title40 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=40, n_jobs=-1)\n model_knn_title_gnr40 = NearestNeighbors(metric='cosine', algorithm=\n 'brute', n_neighbors=40, n_jobs=-1)\n model_knn_times40 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=40, n_jobs=-1)\n model_knn_GEN40 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=40, n_jobs=-1)\n model_knn_ART40 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=40, n_jobs=-1)\n model_knn_song25.fit(song_sp)\n model_knn_tag25.fit(tag_sp)\n model_knn_title25.fit(title_sp)\n model_knn_title_gnr25.fit(title_gnr)\n model_knn_times25.fit(times_sp)\n model_knn_GEN25.fit(GEN_sp)\n model_knn_ART25.fit(ART_sp)\n model_knn_song40.fit(song_sp)\n model_knn_tag40.fit(tag_sp)\n model_knn_title40.fit(title_sp)\n model_knn_title_gnr40.fit(title_gnr)\n model_knn_times40.fit(times_sp)\n model_knn_GEN40.fit(GEN_sp)\n model_knn_ART40.fit(ART_sp)\n train.loc[:, 'num_songs'] = train['songs'].map(len)\n train.loc[:, 'num_tags'] = train['tags_id'].map(len)\n data_all = pd.concat([train, val, test])\n data_all.index = range(len(data_all))\n res = []\n for i in tqdm_notebook(range(len(test))):\n data = test.iloc[i]\n pid = i\n if len(data['songs']) >= 2 and len(data['tags_id']) >= 2:\n p = np.zeros((707989, 1))\n p[data['songs']] = 1\n pp = np.zeros((n_tags, 1))\n pp[data['tags_id']] = 1\n tra_song = data_all.iloc[model_knn_song25.kneighbors(p.T)[1][0]]\n row = np.repeat(range(25), tra_song['num_songs'])\n col = [song for songs in tra_song['songs'] for song in songs]\n dat = np.repeat(1, tra_song['num_songs'].sum())\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_songs)\n )\n tra_song_sp_T = tra_song_sp.T.tocsr()\n tra_tag = 
data_all.iloc[model_knn_tag25.kneighbors(pp.T)[1][0]]\n row = np.repeat(range(25), tra_tag['num_tags'])\n col = [tag for tags in tra_tag['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_tag['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n tra_tim = times_sp[model_knn_times25.kneighbors(tim_test[i:i + \n 1])[1][0]]\n tra_GEN = GEN_sp[model_knn_GEN25.kneighbors(GEN_test[i:i + 1])[\n 1][0]]\n tra_ART = ART_sp[model_knn_ART25.kneighbors(ART_test[i:i + 1])[\n 1][0]]\n tra_title_gnr = title_gnr[model_knn_title_gnr25.kneighbors(\n test_title_gnr[i:i + 1])[1][0]]\n songs_already = data['songs']\n tags_already = data['tags_id']\n test_song = cosine_similarity(tra_song_sp, p.T)\n test_tag = cosine_similarity(tra_tag_sp, pp.T)\n test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])\n test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])\n test_ART = cosine_similarity(tra_ART, ART_test[i:i + 1])\n test_title_genre = cosine_similarity(tra_title_gnr,\n test_title_gnr[i:i + 1])\n testi = (test_song * test_tag * test_title_genre * test_tim *\n test_GEN * test_ART)\n cand_song = tra_song_sp_T.dot(testi)\n cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx,\n songs_already) == False]\n cand1 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()\n tra_song = data_all.iloc[model_knn_song40.kneighbors(p.T)[1][0]]\n row = np.repeat(range(40), tra_song['num_songs'])\n col = [song for songs in tra_song['songs'] for song in songs]\n dat = np.repeat(1, tra_song['num_songs'].sum())\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_songs)\n )\n tra_song_sp_T = tra_song_sp.T.tocsr()\n tra_tag = data_all.iloc[model_knn_tag40.kneighbors(pp.T)[1][0]]\n row = np.repeat(range(40), tra_tag['num_tags'])\n col = [tag for tags in tra_tag['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_tag['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n tra_tim = times_sp[model_knn_times40.kneighbors(tim_test[i:i + \n 1])[1][0]]\n tra_GEN = GEN_sp[model_knn_GEN40.kneighbors(GEN_test[i:i + 1])[\n 1][0]]\n tra_ART = ART_sp[model_knn_ART40.kneighbors(ART_test[i:i + 1])[\n 1][0]]\n tra_title_gnr = title_gnr[model_knn_title_gnr40.kneighbors(\n test_title_gnr[i:i + 1])[1][0]]\n test_song = cosine_similarity(tra_song_sp, p.T)\n test_tag = cosine_similarity(tra_tag_sp, pp.T)\n test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])\n test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])\n test_ART = cosine_similarity(tra_ART, ART_test[i:i + 1])\n test_title_genre = cosine_similarity(tra_title_gnr,\n test_title_gnr[i:i + 1])\n testi = (test_song * test_tag * test_title_genre * test_tim *\n test_GEN * test_ART)\n cand_song = tra_song_sp_T.dot(testi)\n cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx,\n songs_already) == False]\n cand2 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()\n cand_all = pd.merge(cand1, cand2, how='outer', on='index')\n cand_all = cand_all.fillna(0)\n cand_all['pred'] = (cand_all['0_x'] + cand_all['0_y']) / 2\n cand_song_idx = list(cand_all.sort_values(by=['pred'],\n ascending=False)[:100]['index'])\n cand_tag = tra_tag_sp_T.dot(testi)\n cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]\n cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==\n False][:10]\n rec_tag_idx = 
[tag_tid_id[i] for i in cand_tag_idx]\n res.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,\n 'tags': rec_tag_idx})\n elif len(data['songs']) != 0:\n p = np.zeros((707989, 1))\n p[data['songs']] = 1\n tra_song = data_all.iloc[model_knn_song25.kneighbors(p.T)[1][0]]\n row = np.repeat(range(25), tra_song['num_songs'])\n col = [song for songs in tra_song['songs'] for song in songs]\n dat = np.repeat(1, tra_song['num_songs'].sum())\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_songs)\n )\n tra_song_sp_T = tra_song_sp.T.tocsr()\n row = np.repeat(range(25), tra_song['num_tags'])\n col = [tag for tags in tra_song['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_song['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n tra_tim = times_sp[model_knn_times25.kneighbors(tim_test[i:i + \n 1])[1][0]]\n tra_GEN = GEN_sp[model_knn_GEN25.kneighbors(GEN_test[i:i + 1])[\n 1][0]]\n tra_ART = ART_sp[model_knn_ART25.kneighbors(ART_test[i:i + 1])[\n 1][0]]\n tra_title_gnr = title_gnr[model_knn_title_gnr25.kneighbors(\n test_title_gnr[i:i + 1])[1][0]]\n songs_already = data['songs']\n tags_already = data['tags_id']\n test_song = cosine_similarity(tra_song_sp, p.T)\n test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])\n test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])\n test_ART = cosine_similarity(tra_ART, ART_test[i:i + 1])\n test_title_genre = cosine_similarity(tra_title_gnr,\n test_title_gnr[i:i + 1])\n testi = (test_song * test_title_genre * test_tim * test_GEN *\n test_ART)\n cand_song = tra_song_sp_T.dot(testi)\n cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx,\n songs_already) == False]\n cand1 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()\n tra_song = data_all.iloc[model_knn_song40.kneighbors(p.T)[1][0]]\n row = np.repeat(range(40), tra_song['num_songs'])\n col = [song for songs in tra_song['songs'] for song in songs]\n dat = np.repeat(1, tra_song['num_songs'].sum())\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_songs)\n )\n tra_song_sp_T = tra_song_sp.T.tocsr()\n row = np.repeat(range(40), tra_song['num_tags'])\n col = [tag for tags in tra_song['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_song['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n tra_tim = times_sp[model_knn_times40.kneighbors(tim_test[i:i + \n 1])[1][0]]\n tra_GEN = GEN_sp[model_knn_GEN40.kneighbors(GEN_test[i:i + 1])[\n 1][0]]\n tra_ART = ART_sp[model_knn_ART40.kneighbors(ART_test[i:i + 1])[\n 1][0]]\n tra_title_gnr = title_gnr[model_knn_title_gnr40.kneighbors(\n test_title_gnr[i:i + 1])[1][0]]\n test_song = cosine_similarity(tra_song_sp, p.T)\n test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])\n test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])\n test_ART = cosine_similarity(tra_ART, ART_test[i:i + 1])\n test_title_genre = cosine_similarity(tra_title_gnr,\n test_title_gnr[i:i + 1])\n testi = (test_song * test_title_genre * test_tim * test_GEN *\n test_ART)\n cand_song = tra_song_sp_T.dot(testi)\n cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx,\n songs_already) == False]\n cand2 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()\n cand_all = pd.merge(cand1, cand2, how='outer', on='index')\n cand_all = cand_all.fillna(0)\n cand_all['pred'] = 
(cand_all['0_x'] + cand_all['0_y']) / 2\n cand_song_idx = list(cand_all.sort_values(by=['pred'],\n ascending=False)[:100]['index'])\n cand_tag = tra_tag_sp_T.dot(testi)\n cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]\n cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==\n False][:10]\n rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]\n res.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,\n 'tags': rec_tag_idx})\n elif len(data['tags_id']) != 0:\n p = np.zeros((n_tags, 1))\n p[data['tags_id']] = 1\n tra_tag = data_all.iloc[model_knn_tag25.kneighbors(p.T)[1][0]]\n row = np.repeat(range(25), tra_tag['num_songs'])\n col = [song for songs in tra_tag['songs'] for song in songs]\n dat = np.repeat(1, tra_tag['num_songs'].sum())\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_songs)\n )\n tra_song_sp_T = tra_song_sp.T.tocsr()\n row = np.repeat(range(25), tra_tag['num_tags'])\n col = [tag for tags in tra_tag['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_tag['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n songs_already = data['songs']\n tags_already = data['tags_id']\n testi = cosine_similarity(tra_tag_sp, pp.T)\n if len(data['plylst_title']) != 0:\n tra_title_gnr = title_tdm[model_knn_title25.kneighbors(\n title_ts[i:i + 1])[1][0]]\n testi_title = cosine_similarity(tra_title_gnr, title_ts[i:i +\n 1])\n testi = testi * testi_title\n cand_song = tra_song_sp_T.dot(testi)\n cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx,\n songs_already) == False][:100]\n cand_tag = tra_tag_sp_T.dot(testi)\n cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]\n cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==\n False][:10]\n rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]\n res.append({'id': test.loc[pid, 'id'], 'songs': list(\n cand_song_idx), 'tags': rec_tag_idx})\n else:\n cand_song = []\n for li in data_all.iloc[model_knn_title25.kneighbors(title_ts[i\n :i + 1])[1][0]].songs.to_list():\n for j in li:\n cand_song.append(j)\n cand_tag = []\n for li in data_all.iloc[model_knn_title25.kneighbors(title_ts[i\n :i + 1])[1][0]].tags.to_list():\n for j in li:\n cand_tag.append(j)\n cand_song_idx = list(pd.DataFrame(cand_song)[0].value_counts()[\n :100].index)\n rec_tag_idx = list(pd.DataFrame(cand_tag)[0].value_counts()[:10\n ].index)\n res.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,\n 'tags': rec_tag_idx})\n for i in range(len(res)):\n if len(res[i]['songs']) != 100:\n print('song 에서 {}번째 오류 발생'.format(i))\n if len(res[i]['tags']) != 10:\n print('tag 에서 {}번째 오류 발생'.format(i))\n rec = []\n for i in range(len(res)):\n rec.append({'id': res[i]['id'], 'songs': list(res[i]['songs']),\n 'tags': res[i]['tags']})\n result1 = pd.DataFrame(rec)\n model_knn_song = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=50, n_jobs=-1)\n model_knn_tag = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=50, n_jobs=-1)\n model_knn_title = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=50, n_jobs=-1)\n model_knn_title_gnr = NearestNeighbors(metric='cosine', algorithm=\n 'brute', n_neighbors=50, n_jobs=-1)\n model_knn_times = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=50, n_jobs=-1)\n model_knn_GEN = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=50, n_jobs=-1)\n model_knn_ART = 
NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=50, n_jobs=-1)\n model_knn_song.fit(song_sp)\n model_knn_tag.fit(tag_sp)\n model_knn_title.fit(title_sp)\n model_knn_title_gnr.fit(title_gnr)\n model_knn_times.fit(times_sp)\n model_knn_GEN.fit(GEN_sp)\n model_knn_ART.fit(ART_sp)\n res2 = []\n for i in tqdm_notebook([1960, 6361, 8705, 9310, 10498]):\n data = test.iloc[i]\n pid = i\n if len(data['songs']) != 0 and len(data['tags_id']) != 0:\n p = np.zeros((707989, 1))\n p[data['songs']] = 1\n pp = np.zeros((n_tags, 1))\n pp[data['tags_id']] = 1\n tra_song = data_all.iloc[model_knn_song.kneighbors(p.T)[1][0]]\n row = np.repeat(range(50), tra_song['num_songs'])\n col = [song for songs in tra_song['songs'] for song in songs]\n dat = np.repeat(1, tra_song['num_songs'].sum())\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_songs)\n )\n tra_song_sp_T = tra_song_sp.T.tocsr()\n tra_tag = data_all.iloc[model_knn_tag.kneighbors(pp.T)[1][0]]\n row = np.repeat(range(50), tra_tag['num_tags'])\n col = [tag for tags in tra_tag['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_tag['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n tra_tim = times_sp[model_knn_times.kneighbors(tim_test[i:i + 1]\n )[1][0]]\n tra_GEN = GEN_sp[model_knn_GEN.kneighbors(GEN_test[i:i + 1])[1][0]]\n tra_title_gnr = title_gnr[model_knn_title_gnr.kneighbors(\n test_title_gnr[i:i + 1])[1][0]]\n songs_already = data['songs']\n tags_already = data['tags_id']\n test_song = cosine_similarity(tra_song_sp, p.T)\n test_tag = cosine_similarity(tra_tag_sp, pp.T)\n test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])\n test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])\n test_title_genre = cosine_similarity(tra_title_gnr,\n test_title_gnr[i:i + 1])\n testi = test_song * test_tag * test_title_genre * test_GEN\n cand_song = tra_song_sp_T.dot(testi)\n cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx,\n songs_already) == False][:100]\n cand_tag = tra_tag_sp_T.dot(testi)\n cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]\n cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==\n False][:10]\n rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]\n res2.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,\n 'tags': rec_tag_idx})\n elif len(data['songs']) != 0:\n p = np.zeros((707989, 1))\n p[data['songs']] = 1\n tra_song = data_all.iloc[model_knn_song.kneighbors(p.T)[1][0]]\n row = np.repeat(range(50), tra_song['num_songs'])\n col = [song for songs in tra_song['songs'] for song in songs]\n dat = np.repeat(1, tra_song['num_songs'].sum())\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_songs)\n )\n tra_song_sp_T = tra_song_sp.T.tocsr()\n row = np.repeat(range(50), tra_song['num_tags'])\n col = [tag for tags in tra_song['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_song['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n songs_already = data['songs']\n tags_already = data['tags_id']\n tra_tim = times_sp[model_knn_times.kneighbors(tim_test[i:i + 1]\n )[1][0]]\n tra_GEN = GEN_sp[model_knn_GEN.kneighbors(GEN_test[i:i + 1])[1][0]]\n tra_title_gnr = title_gnr[model_knn_title_gnr.kneighbors(\n test_title_gnr[i:i + 1])[1][0]]\n test_song = cosine_similarity(tra_song_sp, p.T)\n test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])\n test_GEN = 
cosine_similarity(tra_GEN, GEN_test[i:i + 1])\n test_title_genre = cosine_similarity(tra_title_gnr,\n test_title_gnr[i:i + 1])\n testi = test_song * test_title_genre * test_tim * test_GEN\n cand_song = tra_song_sp_T.dot(testi)\n cand_song_idx = cand_song.reshape(-1).argsort()[-200:][::-1]\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx,\n songs_already) == False][:100]\n cand_tag = tra_tag_sp_T.dot(testi)\n cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]\n cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==\n False][:10]\n rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]\n res2.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,\n 'tags': rec_tag_idx})\n elif len(data['tags_id']) != 0:\n p = np.zeros((n_tags, 1))\n p[data['tags_id']] = 1\n tra_tag = data_all.iloc[model_knn_tag.kneighbors(p.T)[1][0]]\n row = np.repeat(range(50), tra_tag['num_songs'])\n col = [song for songs in tra_tag['songs'] for song in songs]\n dat = np.repeat(1, tra_tag['num_songs'].sum())\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_songs)\n )\n tra_song_sp_T = tra_song_sp.T.tocsr()\n row = np.repeat(range(50), tra_tag['num_tags'])\n col = [tag for tags in tra_tag['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_tag['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n songs_already = data['songs']\n tags_already = data['tags_id']\n testi = cosine_similarity(tra_tag_sp, p.T)\n if len(data['plylst_title']) != 0:\n tra_title_gnr = title_tdm[model_knn_title.kneighbors(\n title_ts[i:i + 1])[1][0]]\n testi_title = cosine_similarity(tra_title_gnr, title_ts[i:i +\n 1])\n testi = testi * testi_title\n cand_song = tra_song_sp_T.dot(testi)\n cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx,\n songs_already) == False][:100]\n cand_tag = tra_tag_sp_T.dot(testi)\n cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]\n cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==\n False][:10]\n rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]\n res2.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,\n 'tags': rec_tag_idx})\n else:\n cand_song = []\n for li in data_all.iloc[model_knn_title.kneighbors(title_ts[i:i +\n 1])[1][0]].songs.to_list():\n for j in li:\n cand_song.append(j)\n cand_tag = []\n for li in data_all.iloc[model_knn_title.kneighbors(title_ts[i:i +\n 1])[1][0]].tags.to_list():\n for j in li:\n cand_tag.append(j)\n cand_song_idx = list(pd.DataFrame(cand_song)[0].value_counts()[\n :100].index)\n rec_tag_idx = list(pd.DataFrame(cand_tag)[0].value_counts()[:10\n ].index)\n res2.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,\n 'tags': rec_tag_idx})\n pd.DataFrame(res2)\n rec2 = []\n for i in range(len(res2)):\n rec2.append({'id': res2[i]['id'], 'songs': list(res2[i]['songs']),\n 'tags': res2[i]['tags']})\n result2 = pd.DataFrame(rec2)['songs']\n n_index = [10498, 6361, 1960, 8705, 9310]\n result2.index = n_index\n result1.loc[n_index, 'songs'] = result2\n result1['songs'].apply(len).sort_values()\n s = []\n for song in train.songs.tolist():\n s += song\n r1 = dict(Counter(s))\n r_song = sorted(r1.items(), key=lambda x: -x[1])\n r_song_top = r_song[:100]\n list_song = list(dict(r_song_top).keys())\n len(list_song)\n sub = []\n for j in range(len(result1)):\n sub.append(result1.loc[j].to_dict())\n sub[6361]['songs'] = list_song\n pd.DataFrame(sub)['songs'].apply(len).sort_values()\n 
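# Added sketch (not from the original author): a last sanity check before\n # writing the submission; the 100-song / 10-tag quotas mirror the length\n # checks applied to res earlier in this function, and `bad` is a new name\n # introduced only for this guard.\n bad = [r['id'] for r in sub if len(r['songs']) != 100 or len(r['tags']) != 10]\n if bad:\n print('malformed playlists:', bad)\n 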
write_json(sub, 'final_songs.json')\n return sub\n\n\nif __name__ == '__main__':\n _data = Dataset()\n pre_tag.run(_data.test, _data.n_songs, _data.n_tags, _data.spr_list,\n _data.tag_tid_id)\n final_tags = word2vec_for_tag.run(_data.total, _data.test)\n final_songs = song_inference()\n result = []\n for f_songs, f_tags in zip(final_songs, final_tags):\n result.append({'id': f_songs['id'], 'songs': f_songs['songs'],\n 'tags': f_tags['tags']})\n write_json(result, 'results.json')\n",
"step-4": "from datetime import timedelta, datetime\nimport glob\nimport json\nimport os\nimport re\nimport pickle\nimport os, time\nimport pandas as pd\nimport numpy as np\nfrom collections import Counter\nfrom sentencepiece import SentencePieceTrainer\nfrom sentencepiece import SentencePieceProcessor\nfrom sklearn.feature_extraction.text import CountVectorizer\nfrom sklearn.metrics.pairwise import cosine_similarity\nfrom scipy.sparse import vstack\nfrom scipy import sparse\nimport scipy.sparse as spr\nfrom scipy.sparse import vstack\nfrom scipy import sparse\nfrom util import write_json, makeSentencepieceModel\nfrom sklearn.feature_extraction.text import CountVectorizer\nfrom tqdm import tqdm_notebook\nfrom sklearn.neighbors import NearestNeighbors\nfrom Dataset import Dataset\nimport pre_tag, word2vec_for_tag\n\n\ndef song_inference():\n sp_total_model_path = 'sp_total'\n train = pd.read_json('./dataset/train.json', typ='frame', encoding='utf-8')\n song = pd.read_json('./dataset/song_meta.json', typ='frame', encoding=\n 'utf-8')\n plylst_tag = train['tags']\n tag_counter = Counter([tg for tgs in plylst_tag for tg in tgs])\n tag_dict = {x: tag_counter[x] for x in tag_counter}\n tag_id_tid = dict()\n tag_tid_id = dict()\n for i, t in enumerate(tag_dict):\n tag_id_tid[t] = i\n tag_tid_id[i] = t\n n_tags = len(tag_dict)\n plylst_song = train['songs']\n song_dict = {x: x for x in song['id']}\n n_songs = len(song_dict)\n train['tags_id'] = train['tags'].map(lambda x: [tag_id_tid.get(t) for t in\n x if tag_id_tid.get(t) != None])\n song_cate = []\n for i in range(len(train)):\n gnr = []\n songs = train.iloc[i, 3]\n for j in songs:\n for k in song.loc[j, 'song_gn_dtl_gnr_basket']:\n gnr.append(k)\n song_cate.append(gnr)\n train['plylst_genre'] = song_cate\n plylst_genre = train['plylst_genre']\n genre_counter = Counter([gen for genre in plylst_genre for gen in genre])\n genre_dict = {x: genre_counter[x] for x in genre_counter}\n genre_id_tid = dict()\n genre_tid_id = dict()\n for i, t in enumerate(genre_dict):\n genre_id_tid[t] = i\n genre_tid_id[i] = t\n n_genre = len(genre_dict)\n train['plylst_genre_id'] = train['plylst_genre'].map(lambda x: [\n genre_id_tid.get(s) for s in x if genre_id_tid.get(s) != None])\n gnr_array = np.zeros((len(train), n_genre))\n for i, index in enumerate(train.index):\n if i % 10000 == 0:\n print(i)\n counter = Counter(train.loc[index]['plylst_genre_id'])\n for k, c in counter.items():\n gnr_array[i][k] = c\n gnr_array.shape\n song['issue_date'] = song['issue_date'].astype('str').map(lambda x: x[:6])\n plylst_use = train[['plylst_title', 'updt_date', 'tags_id', 'songs']]\n plylst_use.loc[:, 'num_songs'] = plylst_use['songs'].map(len)\n plylst_use.loc[:, 'num_tags'] = plylst_use['tags_id'].map(len)\n plylst_train = plylst_use\n n_train = len(plylst_train)\n row = np.repeat(range(n_train), plylst_train['num_songs'])\n col = [song for songs in plylst_train['songs'] for song in songs]\n dat = np.repeat(1, plylst_train['num_songs'].sum())\n train_user_songs_A = spr.csr_matrix((dat, (row, col)), shape=(n_train,\n n_songs))\n row = np.repeat(range(n_train), plylst_train['num_tags'])\n col = [tag for tags in plylst_train['tags_id'] for tag in tags]\n dat = np.repeat(1, plylst_train['num_tags'].sum())\n train_user_tags_A = spr.csr_matrix((dat, (row, col)), shape=(n_train,\n n_tags))\n train_user_songs_A_T = train_user_songs_A.T.tocsr()\n train_user_songs_A_T\n train_user_tags_A_T = train_user_tags_A.T.tocsr()\n train_user_tags_A_T\n val = pd.read_json('./dataset/val.json', 
typ='frame', encoding='utf-8')\n song_cate = []\n for i in range(len(val)):\n gnr = []\n songs = val.iloc[i, 3]\n for j in songs:\n for k in song.loc[j, 'song_gn_dtl_gnr_basket']:\n gnr.append(k)\n song_cate.append(gnr)\n val['plylst_genre'] = song_cate\n val['tags_id'] = val['tags'].map(lambda x: [tag_id_tid.get(t) for t in\n x if tag_id_tid.get(t) != None])\n val['plylst_genre_id'] = val['plylst_genre'].map(lambda x: [\n genre_id_tid.get(s) for s in x if genre_id_tid.get(s) != None])\n val.loc[:, 'num_songs'] = val['songs'].map(len)\n val.loc[:, 'num_tags'] = val['tags_id'].map(len)\n gnr_val = np.zeros((len(val), n_genre))\n for i, index in enumerate(val.index):\n if i % 10000 == 0:\n print(i)\n counter = Counter(val.loc[index]['plylst_genre_id'])\n for k, c in counter.items():\n gnr_val[i][k] = c\n gnr_val.shape\n n_val = len(val)\n row = np.repeat(range(n_val), val['num_songs'])\n col = [song for songs in val['songs'] for song in songs]\n dat = np.repeat(1, val['num_songs'].sum())\n val_user_songs_A = spr.csr_matrix((dat, (row, col)), shape=(n_val, n_songs)\n )\n row = np.repeat(range(n_val), val['num_tags'])\n col = [tag for tags in val['tags_id'] for tag in tags]\n dat = np.repeat(1, val['num_tags'].sum())\n val_user_tags_A = spr.csr_matrix((dat, (row, col)), shape=(n_val, n_tags))\n val_user_songs_A_T = val_user_songs_A.T.tocsr()\n val_user_tags_A_T = val_user_tags_A.T.tocsr()\n test = pd.read_json('./dataset/test.json', typ='frame', encoding='utf-8')\n song_cate = []\n for i in range(len(test)):\n gnr = []\n songs = test.iloc[i, 3]\n for j in songs:\n for k in song.loc[j, 'song_gn_dtl_gnr_basket']:\n gnr.append(k)\n song_cate.append(gnr)\n test['plylst_genre'] = song_cate\n test['tags_id'] = test['tags'].map(lambda x: [tag_id_tid.get(t) for t in\n x if tag_id_tid.get(t) != None])\n test['plylst_genre_id'] = test['plylst_genre'].map(lambda x: [\n genre_id_tid.get(s) for s in x if genre_id_tid.get(s) != None])\n test.loc[:, 'num_songs'] = test['songs'].map(len)\n test.loc[:, 'num_tags'] = test['tags_id'].map(len)\n gnr_test = np.zeros((len(test), n_genre))\n for i, index in enumerate(test.index):\n if i % 10000 == 0:\n print(i)\n counter = Counter(test.loc[index]['plylst_genre_id'])\n for k, c in counter.items():\n gnr_test[i][k] = c\n gnr_test.shape\n n_test = len(test)\n row = np.repeat(range(n_test), test['num_songs'])\n col = [song for songs in test['songs'] for song in songs]\n dat = np.repeat(1, test['num_songs'].sum())\n test_user_songs_A = spr.csr_matrix((dat, (row, col)), shape=(n_test,\n n_songs))\n row = np.repeat(range(n_test), test['num_tags'])\n col = [tag for tags in test['tags_id'] for tag in tags]\n dat = np.repeat(1, test['num_tags'].sum())\n test_user_tags_A = spr.csr_matrix((dat, (row, col)), shape=(n_test, n_tags)\n )\n test_user_songs_A_T = test_user_songs_A.T.tocsr()\n test_user_tags_A_T = test_user_tags_A.T.tocsr()\n data_all = pd.concat([train, val, test])\n data_all.index = range(len(data_all))\n arts = song['artist_id_basket'].map(lambda x: x[0])\n arts = pd.DataFrame(arts)\n art_counts = arts['artist_id_basket'].value_counts().reset_index()\n art_counts.columns = ['artist_id_basket', 'counts']\n arts2 = pd.merge(arts, art_counts, how='left', on=['artist_id_basket'])\n song_art = song.iloc[arts2.query('counts >= 12')['artist_id_basket'].index]\n song_art = song_art[['artist_id_basket']]\n ART_cate = []\n for i in tqdm_notebook(range(len(data_all))):\n ART = []\n songs = data_all.loc[i, 'songs']\n for j in songs:\n if j in song_art.index:\n for k in 
song_art.loc[j, 'artist_id_basket']:\n ART.append(k)\n ART_cate.append(ART)\n data_all['plylst_ARTIST'] = ART_cate\n plylst_ARTIST = data_all['plylst_ARTIST']\n ARTIST_counter = Counter([ART for ARTIST in plylst_ARTIST for ART in\n ARTIST])\n ARTIST_dict = {x: ARTIST_counter[x] for x in ARTIST_counter}\n ARTIST_id_tid = dict()\n ARTIST_tid_id = dict()\n for i, t in enumerate(ARTIST_dict):\n ARTIST_id_tid[t] = i\n ARTIST_tid_id[i] = t\n n_ARTIST = len(ARTIST_dict)\n data_all['plylst_ARTIST_id'] = data_all['plylst_ARTIST'].map(lambda x:\n [ARTIST_id_tid.get(s) for s in x if ARTIST_id_tid.get(s) != None])\n ART_data_all = np.zeros((len(data_all), n_ARTIST))\n for i, index in enumerate(data_all.index):\n if i % 10000 == 0:\n print(i)\n counter = Counter(data_all.loc[index]['plylst_ARTIST_id'])\n for k, c in counter.items():\n ART_data_all[i][k] = c\n ART_data_all.shape\n ART_array = ART_data_all[:len(train)]\n ART_val = ART_data_all[len(train):len(train) + len(val)]\n ART_test = ART_data_all[len(train) + len(val):len(train) + len(val) +\n len(test)]\n del ART_data_all\n ART_array = sparse.csr_matrix(ART_array)\n ART_val = sparse.csr_matrix(ART_val)\n ART_test = sparse.csr_matrix(ART_test)\n tim_cate = []\n for i in tqdm_notebook(range(len(data_all))):\n tim = []\n songs = data_all.loc[i, 'songs']\n for j in songs:\n tim.append(song.loc[j, 'issue_date'])\n tim_cate.append(tim)\n data_all['plylst_times'] = tim_cate\n plylst_times = data_all['plylst_times']\n times_counter = Counter([tim for times in plylst_times for tim in times])\n times_dict = {x: times_counter[x] for x in times_counter}\n times_id_tid = dict()\n times_tid_id = dict()\n for i, t in enumerate(times_dict):\n times_id_tid[t] = i\n times_tid_id[i] = t\n n_times = len(times_dict)\n data_all['plylst_times_id'] = data_all['plylst_times'].map(lambda x: [\n times_id_tid.get(s) for s in x if times_id_tid.get(s) != None])\n tim_data_all = np.zeros((len(data_all), n_times))\n for i, index in enumerate(data_all.index):\n if i % 10000 == 0:\n print(i)\n counter = Counter(data_all.loc[index]['plylst_times_id'])\n for k, c in counter.items():\n tim_data_all[i][k] = c\n tim_array = tim_data_all[:len(train)]\n tim_val = tim_data_all[len(train):len(train) + len(val)]\n tim_test = tim_data_all[len(train) + len(val):len(train) + len(val) +\n len(test)]\n del tim_data_all\n tim_array = sparse.csr_matrix(tim_array)\n tim_val = sparse.csr_matrix(tim_val)\n tim_test = sparse.csr_matrix(tim_test)\n GEN_cate = []\n for i in tqdm_notebook(range(len(data_all))):\n GEN = []\n songs = data_all.loc[i, 'songs']\n for j in songs:\n for k in song.loc[j, 'song_gn_gnr_basket']:\n GEN.append(k)\n GEN_cate.append(GEN)\n data_all['plylst_GENRE'] = GEN_cate\n plylst_GENRE = data_all['plylst_GENRE']\n GENRE_counter = Counter([GEN for GENRE in plylst_GENRE for GEN in GENRE])\n GENRE_dict = {x: GENRE_counter[x] for x in GENRE_counter}\n GENRE_id_tid = dict()\n GENRE_tid_id = dict()\n for i, t in enumerate(GENRE_dict):\n GENRE_id_tid[t] = i\n GENRE_tid_id[i] = t\n n_GENRE = len(GENRE_dict)\n data_all['plylst_GENRE_id'] = data_all['plylst_GENRE'].map(lambda x: [\n GENRE_id_tid.get(s) for s in x if GENRE_id_tid.get(s) != None])\n GEN_data_all = np.zeros((len(data_all), n_GENRE))\n for i, index in enumerate(data_all.index):\n if i % 10000 == 0:\n print(i)\n counter = Counter(data_all.loc[index]['plylst_GENRE_id'])\n for k, c in counter.items():\n GEN_data_all[i][k] = c\n GEN_array = GEN_data_all[:len(train)]\n GEN_val = GEN_data_all[len(train):len(train) + len(val)]\n 
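# Added note: rows of GEN_data_all follow the pd.concat order train -> val\n # -> test, so the remaining slice below belongs to the test playlists.\n 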
GEN_test = GEN_data_all[len(train) + len(val):len(train) + len(val) +\n len(test)]\n del GEN_data_all\n GEN_array = sparse.csr_matrix(GEN_array)\n GEN_val = sparse.csr_matrix(GEN_val)\n GEN_test = sparse.csr_matrix(GEN_test)\n content = data_all['plylst_title']\n if '{}.model'.format(sp_total_model_path) not in os.listdir():\n makeSentencepieceModel(data_all, sp_total_model_path)\n sp = SentencePieceProcessor()\n sp.Load('{}.model'.format(sp_total_model_path))\n cv = CountVectorizer(max_features=3000, tokenizer=sp.encode_as_pieces)\n content = data_all['plylst_title']\n tdm = cv.fit_transform(content)\n title_tdm = tdm.toarray()\n title_tr = title_tdm[:len(train)]\n title_va = title_tdm[len(train):len(train) + len(val)]\n title_ts = title_tdm[len(train) + len(val):len(train) + len(val) + len(\n test)]\n title_gnr = np.concatenate((gnr_array, title_tr), axis=1)\n val_title_gnr = np.concatenate((gnr_val, title_va), axis=1)\n test_title_gnr = np.concatenate((gnr_test, title_ts), axis=1)\n title_sp = sparse.csr_matrix(title_tdm)\n title_gnr = sparse.csr_matrix(title_gnr)\n val_title_gnr = sparse.csr_matrix(val_title_gnr)\n test_title_gnr = sparse.csr_matrix(test_title_gnr)\n title_gnr = vstack([title_gnr, val_title_gnr, test_title_gnr])\n song_sp = vstack([train_user_songs_A, val_user_songs_A, test_user_songs_A])\n tag_sp = vstack([train_user_tags_A, val_user_tags_A, test_user_tags_A])\n times_sp = vstack([tim_array, tim_val, tim_test])\n GEN_sp = vstack([GEN_array, GEN_val, GEN_test])\n ART_sp = vstack([ART_array, ART_val, ART_test])\n model_knn_song25 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=25, n_jobs=-1)\n model_knn_tag25 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=25, n_jobs=-1)\n model_knn_title25 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=25, n_jobs=-1)\n model_knn_title_gnr25 = NearestNeighbors(metric='cosine', algorithm=\n 'brute', n_neighbors=25, n_jobs=-1)\n model_knn_times25 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=25, n_jobs=-1)\n model_knn_GEN25 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=25, n_jobs=-1)\n model_knn_ART25 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=25, n_jobs=-1)\n model_knn_song40 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=40, n_jobs=-1)\n model_knn_tag40 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=40, n_jobs=-1)\n model_knn_title40 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=40, n_jobs=-1)\n model_knn_title_gnr40 = NearestNeighbors(metric='cosine', algorithm=\n 'brute', n_neighbors=40, n_jobs=-1)\n model_knn_times40 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=40, n_jobs=-1)\n model_knn_GEN40 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=40, n_jobs=-1)\n model_knn_ART40 = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=40, n_jobs=-1)\n model_knn_song25.fit(song_sp)\n model_knn_tag25.fit(tag_sp)\n model_knn_title25.fit(title_sp)\n model_knn_title_gnr25.fit(title_gnr)\n model_knn_times25.fit(times_sp)\n model_knn_GEN25.fit(GEN_sp)\n model_knn_ART25.fit(ART_sp)\n model_knn_song40.fit(song_sp)\n model_knn_tag40.fit(tag_sp)\n model_knn_title40.fit(title_sp)\n model_knn_title_gnr40.fit(title_gnr)\n model_knn_times40.fit(times_sp)\n model_knn_GEN40.fit(GEN_sp)\n model_knn_ART40.fit(ART_sp)\n train.loc[:, 'num_songs'] = train['songs'].map(len)\n train.loc[:, 'num_tags'] = 
train['tags_id'].map(len)\n data_all = pd.concat([train, val, test])\n data_all.index = range(len(data_all))\n res = []\n for i in tqdm_notebook(range(len(test))):\n data = test.iloc[i]\n pid = i\n if len(data['songs']) >= 2 and len(data['tags_id']) >= 2:\n p = np.zeros((707989, 1))\n p[data['songs']] = 1\n pp = np.zeros((n_tags, 1))\n pp[data['tags_id']] = 1\n tra_song = data_all.iloc[model_knn_song25.kneighbors(p.T)[1][0]]\n row = np.repeat(range(25), tra_song['num_songs'])\n col = [song for songs in tra_song['songs'] for song in songs]\n dat = np.repeat(1, tra_song['num_songs'].sum())\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_songs)\n )\n tra_song_sp_T = tra_song_sp.T.tocsr()\n tra_tag = data_all.iloc[model_knn_tag25.kneighbors(pp.T)[1][0]]\n row = np.repeat(range(25), tra_tag['num_tags'])\n col = [tag for tags in tra_tag['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_tag['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n tra_tim = times_sp[model_knn_times25.kneighbors(tim_test[i:i + \n 1])[1][0]]\n tra_GEN = GEN_sp[model_knn_GEN25.kneighbors(GEN_test[i:i + 1])[\n 1][0]]\n tra_ART = ART_sp[model_knn_ART25.kneighbors(ART_test[i:i + 1])[\n 1][0]]\n tra_title_gnr = title_gnr[model_knn_title_gnr25.kneighbors(\n test_title_gnr[i:i + 1])[1][0]]\n songs_already = data['songs']\n tags_already = data['tags_id']\n test_song = cosine_similarity(tra_song_sp, p.T)\n test_tag = cosine_similarity(tra_tag_sp, pp.T)\n test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])\n test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])\n test_ART = cosine_similarity(tra_ART, ART_test[i:i + 1])\n test_title_genre = cosine_similarity(tra_title_gnr,\n test_title_gnr[i:i + 1])\n testi = (test_song * test_tag * test_title_genre * test_tim *\n test_GEN * test_ART)\n cand_song = tra_song_sp_T.dot(testi)\n cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx,\n songs_already) == False]\n cand1 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()\n tra_song = data_all.iloc[model_knn_song40.kneighbors(p.T)[1][0]]\n row = np.repeat(range(40), tra_song['num_songs'])\n col = [song for songs in tra_song['songs'] for song in songs]\n dat = np.repeat(1, tra_song['num_songs'].sum())\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_songs)\n )\n tra_song_sp_T = tra_song_sp.T.tocsr()\n tra_tag = data_all.iloc[model_knn_tag40.kneighbors(pp.T)[1][0]]\n row = np.repeat(range(40), tra_tag['num_tags'])\n col = [tag for tags in tra_tag['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_tag['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n tra_tim = times_sp[model_knn_times40.kneighbors(tim_test[i:i + \n 1])[1][0]]\n tra_GEN = GEN_sp[model_knn_GEN40.kneighbors(GEN_test[i:i + 1])[\n 1][0]]\n tra_ART = ART_sp[model_knn_ART40.kneighbors(ART_test[i:i + 1])[\n 1][0]]\n tra_title_gnr = title_gnr[model_knn_title_gnr40.kneighbors(\n test_title_gnr[i:i + 1])[1][0]]\n test_song = cosine_similarity(tra_song_sp, p.T)\n test_tag = cosine_similarity(tra_tag_sp, pp.T)\n test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])\n test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])\n test_ART = cosine_similarity(tra_ART, ART_test[i:i + 1])\n test_title_genre = cosine_similarity(tra_title_gnr,\n test_title_gnr[i:i + 1])\n testi = (test_song * test_tag * test_title_genre * test_tim *\n 
test_GEN * test_ART)\n cand_song = tra_song_sp_T.dot(testi)\n cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx,\n songs_already) == False]\n cand2 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()\n cand_all = pd.merge(cand1, cand2, how='outer', on='index')\n cand_all = cand_all.fillna(0)\n cand_all['pred'] = (cand_all['0_x'] + cand_all['0_y']) / 2\n cand_song_idx = list(cand_all.sort_values(by=['pred'],\n ascending=False)[:100]['index'])\n cand_tag = tra_tag_sp_T.dot(testi)\n cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]\n cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==\n False][:10]\n rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]\n res.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,\n 'tags': rec_tag_idx})\n elif len(data['songs']) != 0:\n p = np.zeros((707989, 1))\n p[data['songs']] = 1\n tra_song = data_all.iloc[model_knn_song25.kneighbors(p.T)[1][0]]\n row = np.repeat(range(25), tra_song['num_songs'])\n col = [song for songs in tra_song['songs'] for song in songs]\n dat = np.repeat(1, tra_song['num_songs'].sum())\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_songs)\n )\n tra_song_sp_T = tra_song_sp.T.tocsr()\n row = np.repeat(range(25), tra_song['num_tags'])\n col = [tag for tags in tra_song['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_song['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n tra_tim = times_sp[model_knn_times25.kneighbors(tim_test[i:i + \n 1])[1][0]]\n tra_GEN = GEN_sp[model_knn_GEN25.kneighbors(GEN_test[i:i + 1])[\n 1][0]]\n tra_ART = ART_sp[model_knn_ART25.kneighbors(ART_test[i:i + 1])[\n 1][0]]\n tra_title_gnr = title_gnr[model_knn_title_gnr25.kneighbors(\n test_title_gnr[i:i + 1])[1][0]]\n songs_already = data['songs']\n tags_already = data['tags_id']\n test_song = cosine_similarity(tra_song_sp, p.T)\n test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])\n test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])\n test_ART = cosine_similarity(tra_ART, ART_test[i:i + 1])\n test_title_genre = cosine_similarity(tra_title_gnr,\n test_title_gnr[i:i + 1])\n testi = (test_song * test_title_genre * test_tim * test_GEN *\n test_ART)\n cand_song = tra_song_sp_T.dot(testi)\n cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx,\n songs_already) == False]\n cand1 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()\n tra_song = data_all.iloc[model_knn_song40.kneighbors(p.T)[1][0]]\n row = np.repeat(range(40), tra_song['num_songs'])\n col = [song for songs in tra_song['songs'] for song in songs]\n dat = np.repeat(1, tra_song['num_songs'].sum())\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_songs)\n )\n tra_song_sp_T = tra_song_sp.T.tocsr()\n row = np.repeat(range(40), tra_song['num_tags'])\n col = [tag for tags in tra_song['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_song['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n tra_tim = times_sp[model_knn_times40.kneighbors(tim_test[i:i + \n 1])[1][0]]\n tra_GEN = GEN_sp[model_knn_GEN40.kneighbors(GEN_test[i:i + 1])[\n 1][0]]\n tra_ART = ART_sp[model_knn_ART40.kneighbors(ART_test[i:i + 1])[\n 1][0]]\n tra_title_gnr = title_gnr[model_knn_title_gnr40.kneighbors(\n test_title_gnr[i:i + 1])[1][0]]\n test_song = cosine_similarity(tra_song_sp, 
p.T)\n test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])\n test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])\n test_ART = cosine_similarity(tra_ART, ART_test[i:i + 1])\n test_title_genre = cosine_similarity(tra_title_gnr,\n test_title_gnr[i:i + 1])\n testi = (test_song * test_title_genre * test_tim * test_GEN *\n test_ART)\n cand_song = tra_song_sp_T.dot(testi)\n cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx,\n songs_already) == False]\n cand2 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()\n cand_all = pd.merge(cand1, cand2, how='outer', on='index')\n cand_all = cand_all.fillna(0)\n cand_all['pred'] = (cand_all['0_x'] + cand_all['0_y']) / 2\n cand_song_idx = list(cand_all.sort_values(by=['pred'],\n ascending=False)[:100]['index'])\n cand_tag = tra_tag_sp_T.dot(testi)\n cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]\n cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==\n False][:10]\n rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]\n res.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,\n 'tags': rec_tag_idx})\n elif len(data['tags_id']) != 0:\n p = np.zeros((n_tags, 1))\n p[data['tags_id']] = 1\n tra_tag = data_all.iloc[model_knn_tag25.kneighbors(p.T)[1][0]]\n row = np.repeat(range(25), tra_tag['num_songs'])\n col = [song for songs in tra_tag['songs'] for song in songs]\n dat = np.repeat(1, tra_tag['num_songs'].sum())\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_songs)\n )\n tra_song_sp_T = tra_song_sp.T.tocsr()\n row = np.repeat(range(25), tra_tag['num_tags'])\n col = [tag for tags in tra_tag['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_tag['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n songs_already = data['songs']\n tags_already = data['tags_id']\n testi = cosine_similarity(tra_tag_sp, p.T)\n if len(data['plylst_title']) != 0:\n tra_title_gnr = title_tdm[model_knn_title25.kneighbors(\n title_ts[i:i + 1])[1][0]]\n testi_title = cosine_similarity(tra_title_gnr, title_ts[i:i +\n 1])\n testi = testi * testi_title\n cand_song = tra_song_sp_T.dot(testi)\n cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx,\n songs_already) == False][:100]\n cand_tag = tra_tag_sp_T.dot(testi)\n cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]\n cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==\n False][:10]\n rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]\n res.append({'id': test.loc[pid, 'id'], 'songs': list(\n cand_song_idx), 'tags': rec_tag_idx})\n else:\n cand_song = []\n for li in data_all.iloc[model_knn_title25.kneighbors(title_ts[i\n :i + 1])[1][0]].songs.to_list():\n for j in li:\n cand_song.append(j)\n cand_tag = []\n for li in data_all.iloc[model_knn_title25.kneighbors(title_ts[i\n :i + 1])[1][0]].tags.to_list():\n for j in li:\n cand_tag.append(j)\n cand_song_idx = list(pd.DataFrame(cand_song)[0].value_counts()[\n :100].index)\n rec_tag_idx = list(pd.DataFrame(cand_tag)[0].value_counts()[:10\n ].index)\n res.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,\n 'tags': rec_tag_idx})\n for i in range(len(res)):\n if len(res[i]['songs']) != 100:\n print('error at playlist {} in songs'.format(i))\n if len(res[i]['tags']) != 10:\n print('error at playlist {} in tags'.format(i))\n rec = []\n for i in range(len(res)):\n rec.append({'id': res[i]['id'], 'songs': list(res[i]['songs']),\n 
'tags': res[i]['tags']})\n result1 = pd.DataFrame(rec)\n model_knn_song = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=50, n_jobs=-1)\n model_knn_tag = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=50, n_jobs=-1)\n model_knn_title = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=50, n_jobs=-1)\n model_knn_title_gnr = NearestNeighbors(metric='cosine', algorithm=\n 'brute', n_neighbors=50, n_jobs=-1)\n model_knn_times = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=50, n_jobs=-1)\n model_knn_GEN = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=50, n_jobs=-1)\n model_knn_ART = NearestNeighbors(metric='cosine', algorithm='brute',\n n_neighbors=50, n_jobs=-1)\n model_knn_song.fit(song_sp)\n model_knn_tag.fit(tag_sp)\n model_knn_title.fit(title_sp)\n model_knn_title_gnr.fit(title_gnr)\n model_knn_times.fit(times_sp)\n model_knn_GEN.fit(GEN_sp)\n model_knn_ART.fit(ART_sp)\n res2 = []\n for i in tqdm_notebook([1960, 6361, 8705, 9310, 10498]):\n data = test.iloc[i]\n pid = i\n if len(data['songs']) != 0 and len(data['tags_id']) != 0:\n p = np.zeros((707989, 1))\n p[data['songs']] = 1\n pp = np.zeros((n_tags, 1))\n pp[data['tags_id']] = 1\n tra_song = data_all.iloc[model_knn_song.kneighbors(p.T)[1][0]]\n row = np.repeat(range(50), tra_song['num_songs'])\n col = [song for songs in tra_song['songs'] for song in songs]\n dat = np.repeat(1, tra_song['num_songs'].sum())\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_songs)\n )\n tra_song_sp_T = tra_song_sp.T.tocsr()\n tra_tag = data_all.iloc[model_knn_tag.kneighbors(pp.T)[1][0]]\n row = np.repeat(range(50), tra_tag['num_tags'])\n col = [tag for tags in tra_tag['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_tag['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n tra_tim = times_sp[model_knn_times.kneighbors(tim_test[i:i + 1]\n )[1][0]]\n tra_GEN = GEN_sp[model_knn_GEN.kneighbors(GEN_test[i:i + 1])[1][0]]\n tra_title_gnr = title_gnr[model_knn_title_gnr.kneighbors(\n test_title_gnr[i:i + 1])[1][0]]\n songs_already = data['songs']\n tags_already = data['tags_id']\n test_song = cosine_similarity(tra_song_sp, p.T)\n test_tag = cosine_similarity(tra_tag_sp, pp.T)\n test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])\n test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])\n test_title_genre = cosine_similarity(tra_title_gnr,\n test_title_gnr[i:i + 1])\n testi = test_song * test_tag * test_title_genre * test_GEN\n cand_song = tra_song_sp_T.dot(testi)\n cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx,\n songs_already) == False][:100]\n cand_tag = tra_tag_sp_T.dot(testi)\n cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]\n cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==\n False][:10]\n rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]\n res2.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,\n 'tags': rec_tag_idx})\n elif len(data['songs']) != 0:\n p = np.zeros((707989, 1))\n p[data['songs']] = 1\n tra_song = data_all.iloc[model_knn_song.kneighbors(p.T)[1][0]]\n row = np.repeat(range(50), tra_song['num_songs'])\n col = [song for songs in tra_song['songs'] for song in songs]\n dat = np.repeat(1, tra_song['num_songs'].sum())\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_songs)\n )\n tra_song_sp_T = tra_song_sp.T.tocsr()\n row = 
np.repeat(range(50), tra_song['num_tags'])\n col = [tag for tags in tra_song['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_song['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n songs_already = data['songs']\n tags_already = data['tags_id']\n tra_tim = times_sp[model_knn_times.kneighbors(tim_test[i:i + 1]\n )[1][0]]\n tra_GEN = GEN_sp[model_knn_GEN.kneighbors(GEN_test[i:i + 1])[1][0]]\n tra_title_gnr = title_gnr[model_knn_title_gnr.kneighbors(\n test_title_gnr[i:i + 1])[1][0]]\n test_song = cosine_similarity(tra_song_sp, p.T)\n test_tim = cosine_similarity(tra_tim, tim_test[i:i + 1])\n test_GEN = cosine_similarity(tra_GEN, GEN_test[i:i + 1])\n test_title_genre = cosine_similarity(tra_title_gnr,\n test_title_gnr[i:i + 1])\n testi = test_song * test_title_genre * test_tim * test_GEN\n cand_song = tra_song_sp_T.dot(testi)\n cand_song_idx = cand_song.reshape(-1).argsort()[-200:][::-1]\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx,\n songs_already) == False][:100]\n cand_tag = tra_tag_sp_T.dot(testi)\n cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]\n cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==\n False][:10]\n rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]\n res2.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,\n 'tags': rec_tag_idx})\n elif len(data['tags_id']) != 0:\n p = np.zeros((n_tags, 1))\n p[data['tags_id']] = 1\n tra_tag = data_all.iloc[model_knn_tag.kneighbors(p.T)[1][0]]\n row = np.repeat(range(50), tra_tag['num_songs'])\n col = [song for songs in tra_tag['songs'] for song in songs]\n dat = np.repeat(1, tra_tag['num_songs'].sum())\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_songs)\n )\n tra_song_sp_T = tra_song_sp.T.tocsr()\n row = np.repeat(range(50), tra_tag['num_tags'])\n col = [tag for tags in tra_tag['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_tag['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n songs_already = data['songs']\n tags_already = data['tags_id']\n testi = cosine_similarity(tra_tag_sp, p.T)\n if len(data['plylst_title']) != 0:\n tra_title_gnr = title_tdm[model_knn_title.kneighbors(\n title_ts[i:i + 1])[1][0]]\n testi_title = cosine_similarity(tra_title_gnr, title_ts[i:i +\n 1])\n testi = testi * testi_title\n cand_song = tra_song_sp_T.dot(testi)\n cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1]\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx,\n songs_already) == False][:100]\n cand_tag = tra_tag_sp_T.dot(testi)\n cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]\n cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) ==\n False][:10]\n rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]\n res2.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,\n 'tags': rec_tag_idx})\n else:\n cand_song = []\n for li in data_all.iloc[model_knn_title.kneighbors(title_ts[i:i +\n 1])[1][0]].songs.to_list():\n for j in li:\n cand_song.append(j)\n cand_tag = []\n for li in data_all.iloc[model_knn_title.kneighbors(title_ts[i:i +\n 1])[1][0]].tags.to_list():\n for j in li:\n cand_tag.append(j)\n cand_song_idx = list(pd.DataFrame(cand_song)[0].value_counts()[\n :100].index)\n rec_tag_idx = list(pd.DataFrame(cand_tag)[0].value_counts()[:10\n ].index)\n res2.append({'id': test.loc[pid, 'id'], 'songs': cand_song_idx,\n 'tags': rec_tag_idx})\n pd.DataFrame(res2)\n rec2 = []\n for i in range(len(res2)):\n 
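# Added note: repackage the five re-scored playlists into the same dict\n # shape as res, so their song lists can overwrite the matching rows of\n # result1 below.\n 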
rec2.append({'id': res2[i]['id'], 'songs': list(res2[i]['songs']),\n 'tags': res2[i]['tags']})\n result2 = pd.DataFrame(rec2)['songs']\n n_index = [10498, 6361, 1960, 8705, 9310]\n result2.index = n_index\n result1.loc[n_index, 'songs'] = result2\n result1['songs'].apply(len).sort_values()\n s = []\n for song in train.songs.tolist():\n s += song\n r1 = dict(Counter(s))\n r_song = sorted(r1.items(), key=lambda x: -x[1])\n r_song_top = r_song[:100]\n list_song = list(dict(r_song_top).keys())\n len(list_song)\n sub = []\n for j in range(len(result1)):\n sub.append(result1.loc[j].to_dict())\n sub[6361]['songs'] = list_song\n pd.DataFrame(sub)['songs'].apply(len).sort_values()\n write_json(sub, 'final_songs.json')\n return sub\n\n\nif __name__ == '__main__':\n _data = Dataset()\n pre_tag.run(_data.test, _data.n_songs, _data.n_tags, _data.spr_list,\n _data.tag_tid_id)\n final_tags = word2vec_for_tag.run(_data.total, _data.test)\n final_songs = song_inference()\n result = []\n for f_songs, f_tags in zip(final_songs, final_tags):\n result.append({'id': f_songs['id'], 'songs': f_songs['songs'],\n 'tags': f_tags['tags']})\n write_json(result, 'results.json')\n",
"step-5": "from datetime import timedelta, datetime\nimport glob\nimport json\nimport os\nimport re\nimport pickle\n\nimport os,time\nimport pandas as pd\nimport numpy as np\nfrom collections import Counter\nfrom sentencepiece import SentencePieceTrainer\nfrom sentencepiece import SentencePieceProcessor\nfrom sklearn.feature_extraction.text import CountVectorizer\nfrom sklearn.metrics.pairwise import cosine_similarity\nfrom scipy.sparse import vstack\nfrom scipy import sparse\nimport scipy.sparse as spr\nfrom scipy.sparse import vstack\nfrom scipy import sparse\nfrom util import write_json,makeSentencepieceModel\nfrom sklearn.feature_extraction.text import CountVectorizer\nfrom tqdm import tqdm_notebook\nfrom sklearn.neighbors import NearestNeighbors\n\nfrom Dataset import Dataset\nimport pre_tag,word2vec_for_tag\n\ndef song_inference():\n sp_total_model_path = \"sp_total\"\n train = pd.read_json('./dataset/train.json', typ = 'frame',encoding='utf-8')\n song = pd.read_json('./dataset/song_meta.json', typ = 'frame',encoding='utf-8')\n plylst_tag = train['tags']\n tag_counter = Counter([tg for tgs in plylst_tag for tg in tgs])\n tag_dict = {x: tag_counter[x] for x in tag_counter}\n\n tag_id_tid = dict()\n tag_tid_id = dict()\n for i, t in enumerate(tag_dict):\n tag_id_tid[t] = i\n tag_tid_id[i] = t\n n_tags = len(tag_dict)\n\n plylst_song = train['songs']\n song_dict = {x: x for x in song['id']}\n\n n_songs = len(song_dict)\n\n train['tags_id'] = train['tags'].map(lambda x: [tag_id_tid.get(t) for t in x if tag_id_tid.get(t) != None])\n # song genre 내용 가져오기.\n song_cate = []\n\n for i in range(len(train)):\n gnr = []\n songs = train.iloc[i,3]\n\n for j in songs:\n for k in song.loc[j,'song_gn_dtl_gnr_basket']:\n gnr.append(k)\n song_cate.append(gnr)\n\n\n train['plylst_genre'] = song_cate\n\n plylst_genre = train['plylst_genre']\n genre_counter = Counter([gen for genre in plylst_genre for gen in genre])\n genre_dict = {x: genre_counter[x] for x in genre_counter}\n\n genre_id_tid = dict()\n genre_tid_id = dict()\n for i, t in enumerate(genre_dict):\n genre_id_tid[t] = i\n genre_tid_id[i] = t\n n_genre = len(genre_dict)\n train['plylst_genre_id'] = train['plylst_genre'].map(lambda x: [genre_id_tid.get(s) for s in x if genre_id_tid.get(s) != None])\n\n gnr_array = np.zeros((len(train),n_genre))\n for i,index in enumerate(train.index):\n if i%10000 == 0:\n print(i)\n counter = Counter(train.loc[index]['plylst_genre_id'])\n for (k,c) in counter.items():\n gnr_array[i][k] = c\n gnr_array.shape\n\n song['issue_date'] = song['issue_date'].astype('str').map(lambda x : x[:6])\n\n plylst_use = train[['plylst_title','updt_date','tags_id','songs']]\n plylst_use.loc[:,'num_songs'] = plylst_use['songs'].map(len)\n plylst_use.loc[:,'num_tags'] = plylst_use['tags_id'].map(len)\n\n plylst_train = plylst_use\n\n n_train = len(plylst_train)\n row = np.repeat(range(n_train), plylst_train['num_songs']) # User Index 별 노래 개수만큼 만듦\n col = [song for songs in plylst_train['songs'] for song in songs] # Song dic number 추출\n dat = np.repeat(1, plylst_train['num_songs'].sum()) # User별 Song이 있는 부분에 1을 넣기위해 1과 전체 노래 개수만큼 만듦\n train_user_songs_A = spr.csr_matrix((dat, (row, col)), shape=(n_train, n_songs)) # csr_matrix 제작\n\n row = np.repeat(range(n_train), plylst_train['num_tags'])\n col = [tag for tags in plylst_train['tags_id'] for tag in tags]\n dat = np.repeat(1, plylst_train['num_tags'].sum())\n train_user_tags_A = spr.csr_matrix((dat, (row, col)), shape=(n_train, n_tags))\n\n train_user_songs_A_T = 
train_user_songs_A.T.tocsr()\n train_user_songs_A_T # 행에는 노래 columns에는 User 정보 삽입\n\n train_user_tags_A_T = train_user_tags_A.T.tocsr()\n train_user_tags_A_T # 행에는 Tangs columns에는 User 정보 삽입\n\n val = pd.read_json('./dataset/val.json', typ = 'frame',encoding='utf-8')\n\n song_cate = []\n\n for i in range(len(val)):\n gnr = []\n songs = val.iloc[i,3]\n\n for j in songs:\n for k in song.loc[j,'song_gn_dtl_gnr_basket']:\n gnr.append(k)\n song_cate.append(gnr)\n\n val['plylst_genre'] = song_cate\n\n val['tags_id'] = val['tags'].map(lambda x: [tag_id_tid.get(t) for t in x if tag_id_tid.get(t) != None])\n val['plylst_genre_id'] = val['plylst_genre'].map(lambda x: [genre_id_tid.get(s) for s in x if genre_id_tid.get(s) != None])\n val.loc[:,'num_songs'] = val['songs'].map(len)\n val.loc[:,'num_tags'] = val['tags_id'].map(len)\n # val_title = cv.transform(val['plylst_title']).toarray()\n\n gnr_val = np.zeros((len(val),n_genre))\n for i,index in enumerate(val.index):\n if i%10000 == 0:\n print(i)\n counter = Counter(val.loc[index]['plylst_genre_id'])\n for (k,c) in counter.items():\n gnr_val[i][k] = c\n gnr_val.shape\n\n n_val = len(val)\n row = np.repeat(range(n_val), val['num_songs']) # User Index 별 노래 개수만큼 만듦\n col = [song for songs in val['songs'] for song in songs] # Song dic number 추출\n dat = np.repeat(1, val['num_songs'].sum()) # User별 Song이 있는 부분에 1을 넣기위해 1과 전체 노래 개수만큼 만듦\n val_user_songs_A = spr.csr_matrix((dat, (row, col)), shape=(n_val, n_songs)) # csr_matrix 제작\n\n row = np.repeat(range(n_val), val['num_tags'])\n col = [tag for tags in val['tags_id'] for tag in tags]\n dat = np.repeat(1, val['num_tags'].sum())\n val_user_tags_A = spr.csr_matrix((dat, (row, col)), shape=(n_val, n_tags))\n\n val_user_songs_A_T = val_user_songs_A.T.tocsr()\n val_user_tags_A_T = val_user_tags_A.T.tocsr()\n\n test = pd.read_json('./dataset/test.json', typ = 'frame',encoding='utf-8')\n\n song_cate = []\n\n for i in range(len(test)):\n gnr = []\n songs = test.iloc[i,3]\n\n for j in songs:\n for k in song.loc[j,'song_gn_dtl_gnr_basket']:\n gnr.append(k)\n song_cate.append(gnr)\n\n test['plylst_genre'] = song_cate\n\n test['tags_id'] = test['tags'].map(lambda x: [tag_id_tid.get(t) for t in x if tag_id_tid.get(t) != None])\n test['plylst_genre_id'] = test['plylst_genre'].map(lambda x: [genre_id_tid.get(s) for s in x if genre_id_tid.get(s) != None])\n test.loc[:,'num_songs'] = test['songs'].map(len)\n test.loc[:,'num_tags'] = test['tags_id'].map(len)\n # test_title = cv.transform(test['plylst_title']).toarray()\n\n gnr_test = np.zeros((len(test),n_genre))\n for i,index in enumerate(test.index):\n if i%10000 == 0:\n print(i)\n counter = Counter(test.loc[index]['plylst_genre_id'])\n for (k,c) in counter.items():\n gnr_test[i][k] = c\n gnr_test.shape\n\n n_test = len(test)\n row = np.repeat(range(n_test), test['num_songs']) # User Index 별 노래 개수만큼 만듦\n col = [song for songs in test['songs'] for song in songs] # Song dic number 추출\n dat = np.repeat(1, test['num_songs'].sum()) # User별 Song이 있는 부분에 1을 넣기위해 1과 전체 노래 개수만큼 만듦\n test_user_songs_A = spr.csr_matrix((dat, (row, col)), shape=(n_test, n_songs)) # csr_matrix 제작\n\n row = np.repeat(range(n_test), test['num_tags'])\n col = [tag for tags in test['tags_id'] for tag in tags]\n dat = np.repeat(1, test['num_tags'].sum())\n test_user_tags_A = spr.csr_matrix((dat, (row, col)), shape=(n_test, n_tags))\n\n test_user_songs_A_T = test_user_songs_A.T.tocsr()\n test_user_tags_A_T = test_user_tags_A.T.tocsr()\n\n data_all = pd.concat([train,val,test])\n data_all.index = 
range(len(data_all))\n\n arts = song['artist_id_basket'].map(lambda x : x[0])\n\n arts = pd.DataFrame(arts)\n\n art_counts = arts['artist_id_basket'].value_counts().reset_index()\n art_counts.columns = ['artist_id_basket','counts']\n\n arts2 = pd.merge(arts,art_counts,how='left',on=['artist_id_basket'])\n\n song_art = song.iloc[arts2.query('counts >= 12')['artist_id_basket'].index]\n\n song_art = song_art[['artist_id_basket']]\n\n #아티스트 대분류\n ART_cate = []\n\n for i in tqdm_notebook(range(len(data_all))):\n ART = []\n songs = data_all.loc[i,'songs']\n\n for j in songs:\n if j in song_art.index :\n for k in song_art.loc[j,'artist_id_basket'] :\n ART.append(k)\n ART_cate.append(ART)\n\n\n data_all['plylst_ARTIST'] = ART_cate\n\n plylst_ARTIST = data_all['plylst_ARTIST']\n ARTIST_counter = Counter([ART for ARTIST in plylst_ARTIST for ART in ARTIST])\n ARTIST_dict = {x: ARTIST_counter[x] for x in ARTIST_counter}\n\n ARTIST_id_tid = dict()\n ARTIST_tid_id = dict()\n for i, t in enumerate(ARTIST_dict):\n ARTIST_id_tid[t] = i\n ARTIST_tid_id[i] = t\n n_ARTIST = len(ARTIST_dict)\n data_all['plylst_ARTIST_id'] = data_all['plylst_ARTIST'].map(lambda x: [ARTIST_id_tid.get(s) for s in x if ARTIST_id_tid.get(s) != None])\n\n ART_data_all = np.zeros((len(data_all),n_ARTIST))\n for i,index in enumerate(data_all.index):\n if i%10000 == 0:\n print(i)\n counter = Counter(data_all.loc[index]['plylst_ARTIST_id'])\n for (k,c) in counter.items():\n ART_data_all[i][k] = c\n ART_data_all.shape\n\n ART_array = ART_data_all[:len(train)]\n ART_val = ART_data_all[len(train):len(train)+len(val)]\n ART_test = ART_data_all[len(train)+len(val):len(train)+len(val)+len(test)]\n\n\n # ART_data_all = sparse.csr_matrix(ART_data_all)\n del ART_data_all\n\n ART_array = sparse.csr_matrix(ART_array)\n ART_val = sparse.csr_matrix(ART_val)\n ART_test = sparse.csr_matrix(ART_test)\n\n # song tim 내용 가져오기.\n tim_cate = []\n\n for i in tqdm_notebook(range(len(data_all))):\n tim = []\n songs = data_all.loc[i,'songs']\n\n for j in songs:\n tim.append(song.loc[j,'issue_date'])\n tim_cate.append(tim)\n\n\n data_all['plylst_times'] = tim_cate\n\n plylst_times = data_all['plylst_times']\n times_counter = Counter([tim for times in plylst_times for tim in times])\n times_dict = {x: times_counter[x] for x in times_counter}\n\n times_id_tid = dict()\n times_tid_id = dict()\n for i, t in enumerate(times_dict):\n times_id_tid[t] = i\n times_tid_id[i] = t\n n_times = len(times_dict)\n data_all['plylst_times_id'] = data_all['plylst_times'].map(lambda x: [times_id_tid.get(s) for s in x if times_id_tid.get(s) != None])\n\n tim_data_all = np.zeros((len(data_all),n_times))\n for i,index in enumerate(data_all.index):\n if i%10000 == 0:\n print(i)\n counter = Counter(data_all.loc[index]['plylst_times_id'])\n for (k,c) in counter.items():\n tim_data_all[i][k] = c\n\n tim_array = tim_data_all[:len(train)]\n tim_val = tim_data_all[len(train):len(train)+len(val)]\n tim_test = tim_data_all[len(train)+len(val):len(train)+len(val)+len(test)]\n\n # tim_data_all = sparse.csr_matrix(tim_data_all)\n del tim_data_all\n\n tim_array = sparse.csr_matrix(tim_array)\n tim_val = sparse.csr_matrix(tim_val)\n tim_test = sparse.csr_matrix(tim_test)\n\n #장르 대분류\n GEN_cate = []\n\n for i in tqdm_notebook(range(len(data_all))):\n GEN = []\n songs = data_all.loc[i,'songs']\n\n for j in songs:\n for k in song.loc[j,'song_gn_gnr_basket'] :\n GEN.append(k)\n GEN_cate.append(GEN)\n\n\n data_all['plylst_GENRE'] = GEN_cate\n\n plylst_GENRE = data_all['plylst_GENRE']\n GENRE_counter = 
Counter([GEN for GENRE in plylst_GENRE for GEN in GENRE])\n GENRE_dict = {x: GENRE_counter[x] for x in GENRE_counter}\n\n GENRE_id_tid = dict()\n GENRE_tid_id = dict()\n for i, t in enumerate(GENRE_dict):\n GENRE_id_tid[t] = i\n GENRE_tid_id[i] = t\n n_GENRE = len(GENRE_dict)\n data_all['plylst_GENRE_id'] = data_all['plylst_GENRE'].map(lambda x: [GENRE_id_tid.get(s) for s in x if GENRE_id_tid.get(s) != None])\n\n GEN_data_all = np.zeros((len(data_all),n_GENRE))\n for i,index in enumerate(data_all.index):\n if i%10000 == 0:\n print(i)\n counter = Counter(data_all.loc[index]['plylst_GENRE_id'])\n for (k,c) in counter.items():\n GEN_data_all[i][k] = c\n\n GEN_array = GEN_data_all[:len(train)]\n GEN_val = GEN_data_all[len(train):len(train)+len(val)]\n GEN_test = GEN_data_all[len(train)+len(val):len(train)+len(val)+len(test)]\n # GEN_data_all = sparse.csr_matrix(GEN_data_all)\n del GEN_data_all\n\n GEN_array = sparse.csr_matrix(GEN_array)\n GEN_val = sparse.csr_matrix(GEN_val)\n GEN_test = sparse.csr_matrix(GEN_test)\n\n content = data_all['plylst_title']\n if \"{}.model\".format(sp_total_model_path) not in os.listdir():\n makeSentencepieceModel(data_all,sp_total_model_path)\n sp = SentencePieceProcessor()\n sp.Load(\"{}.model\".format(sp_total_model_path))\n\n cv = CountVectorizer(max_features=3000, tokenizer=sp.encode_as_pieces)\n content = data_all['plylst_title']\n tdm = cv.fit_transform(content)\n\n title_tdm = tdm.toarray()\n\n title_tr = title_tdm[:len(train)]\n title_va = title_tdm[len(train):len(train)+len(val)]\n title_ts = title_tdm[len(train)+len(val):len(train)+len(val)+len(test)]\n\n title_gnr = np.concatenate((gnr_array,title_tr),axis=1)\n val_title_gnr = np.concatenate((gnr_val,title_va),axis=1)\n test_title_gnr = np.concatenate((gnr_test,title_ts),axis=1)\n\n title_sp = sparse.csr_matrix(title_tdm)\n\n title_gnr = sparse.csr_matrix(title_gnr)\n val_title_gnr = sparse.csr_matrix(val_title_gnr)\n test_title_gnr = sparse.csr_matrix(test_title_gnr)\n\n title_gnr = vstack([title_gnr,val_title_gnr,test_title_gnr])\n song_sp = vstack([train_user_songs_A,val_user_songs_A,test_user_songs_A])\n tag_sp = vstack([train_user_tags_A,val_user_tags_A,test_user_tags_A])\n times_sp = vstack([tim_array,tim_val,tim_test])\n GEN_sp = vstack([GEN_array,GEN_val,GEN_test])\n\n\n ART_sp = vstack([ART_array,ART_val,ART_test])\n\n # song_sp_T = song_sp.T.tocsr()\n # tag_sp_T = tag_sp.T.tocsr()\n\n\n model_knn_song25 = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=25, n_jobs=-1)\n model_knn_tag25 = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=25, n_jobs=-1)\n model_knn_title25 = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=25, n_jobs=-1)\n model_knn_title_gnr25 = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=25, n_jobs=-1)\n model_knn_times25 = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=25, n_jobs=-1)\n model_knn_GEN25 = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=25, n_jobs=-1)\n model_knn_ART25 = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=25, n_jobs=-1)\n\n model_knn_song40 = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=40, n_jobs=-1)\n model_knn_tag40 = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=40, n_jobs=-1)\n model_knn_title40 = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=40, n_jobs=-1)\n model_knn_title_gnr40 = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=40, n_jobs=-1)\n 
model_knn_times40 = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=40, n_jobs=-1)\n model_knn_GEN40 = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=40, n_jobs=-1)\n model_knn_ART40 = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=40, n_jobs=-1)\n\n\n\n model_knn_song25.fit(song_sp)\n model_knn_tag25.fit(tag_sp)\n model_knn_title25.fit(title_sp)\n model_knn_title_gnr25.fit(title_gnr)\n model_knn_times25.fit(times_sp)\n model_knn_GEN25.fit(GEN_sp)\n model_knn_ART25.fit(ART_sp)\n\n model_knn_song40.fit(song_sp)\n model_knn_tag40.fit(tag_sp)\n model_knn_title40.fit(title_sp)\n model_knn_title_gnr40.fit(title_gnr)\n model_knn_times40.fit(times_sp)\n model_knn_GEN40.fit(GEN_sp)\n model_knn_ART40.fit(ART_sp)\n\n\n\n\n\n\n train.loc[:,'num_songs'] = train['songs'].map(len)\n train.loc[:,'num_tags'] = train['tags_id'].map(len)\n\n data_all = pd.concat([train,val,test])\n\n data_all.index = range(len(data_all))\n\n\n res = []\n for i in tqdm_notebook(range(len(test))):\n data = test.iloc[i]\n pid = i\n\n if len(data['songs']) >= 2 and len(data['tags_id']) >=2 :\n p = np.zeros((707989,1))\n p[data['songs']] = 1\n\n pp = np.zeros((n_tags,1))\n pp[data['tags_id']] = 1\n\n tra_song = data_all.iloc[model_knn_song25.kneighbors(p.T)[1][0]]\n row = np.repeat(range(25), tra_song['num_songs']) # repeat each playlist index once per song it contains\n col = [song for songs in tra_song['songs'] for song in songs] # flatten out the song ids\n dat = np.repeat(1, tra_song['num_songs'].sum()) # a 1 for every (playlist, song) entry\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_songs)) # build the csr_matrix\n tra_song_sp_T = tra_song_sp.T.tocsr()\n\n tra_tag = data_all.iloc[model_knn_tag25.kneighbors(pp.T)[1][0]]\n row = np.repeat(range(25), tra_tag['num_tags'])\n col = [tag for tags in tra_tag['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_tag['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n\n tra_tim = times_sp[model_knn_times25.kneighbors(tim_test[i:(i+1)])[1][0]]\n tra_GEN = GEN_sp[model_knn_GEN25.kneighbors(GEN_test[i:(i+1)])[1][0]]\n tra_ART = ART_sp[model_knn_ART25.kneighbors(ART_test[i:(i+1)])[1][0]]\n tra_title_gnr = title_gnr[model_knn_title_gnr25.kneighbors(test_title_gnr[i:(i+1)])[1][0]]\n\n songs_already = data[\"songs\"]\n tags_already = data[\"tags_id\"]\n\n test_song = cosine_similarity(tra_song_sp,p.T)\n test_tag = cosine_similarity(tra_tag_sp,pp.T)\n\n test_tim = cosine_similarity(tra_tim,tim_test[i:(i+1)])\n test_GEN = cosine_similarity(tra_GEN,GEN_test[i:(i+1)])\n test_ART = cosine_similarity(tra_ART,ART_test[i:(i+1)])\n test_title_genre = cosine_similarity(tra_title_gnr,test_title_gnr[i:(i+1)])\n\n testi = test_song * test_tag * test_title_genre * test_tim * test_GEN * test_ART\n\n cand_song = tra_song_sp_T.dot(testi) # (songs x playlists) %*% similarity weights -> songs of similar playlists score high\n cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1] # take the 300 highest-scoring songs\n\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx, songs_already) == False] # remove songs already in the playlist\n cand1 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()\n ####### 40 ####################################################\n tra_song = data_all.iloc[model_knn_song40.kneighbors(p.T)[1][0]]\n row = np.repeat(range(40), tra_song['num_songs']) # repeat each playlist index once per song it contains\n col = [song for songs in tra_song['songs'] for song in songs] # flatten out the song ids\n dat = np.repeat(1, tra_song['num_songs'].sum()) # a 1 for every (playlist, song) entry\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_songs)) # build the csr_matrix\n tra_song_sp_T = tra_song_sp.T.tocsr()\n\n tra_tag = data_all.iloc[model_knn_tag40.kneighbors(pp.T)[1][0]]\n row = np.repeat(range(40), tra_tag['num_tags'])\n col = [tag for tags in tra_tag['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_tag['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n\n tra_tim = times_sp[model_knn_times40.kneighbors(tim_test[i:(i+1)])[1][0]]\n tra_GEN = GEN_sp[model_knn_GEN40.kneighbors(GEN_test[i:(i+1)])[1][0]]\n tra_ART = ART_sp[model_knn_ART40.kneighbors(ART_test[i:(i+1)])[1][0]]\n tra_title_gnr = title_gnr[model_knn_title_gnr40.kneighbors(test_title_gnr[i:(i+1)])[1][0]]\n\n test_song = cosine_similarity(tra_song_sp,p.T)\n test_tag = cosine_similarity(tra_tag_sp,pp.T)\n\n test_tim = cosine_similarity(tra_tim,tim_test[i:(i+1)])\n test_GEN = cosine_similarity(tra_GEN,GEN_test[i:(i+1)])\n test_ART = cosine_similarity(tra_ART,ART_test[i:(i+1)])\n test_title_genre = cosine_similarity(tra_title_gnr,test_title_gnr[i:(i+1)])\n\n testi = test_song * test_tag * test_title_genre * test_tim * test_GEN * test_ART\n\n cand_song = tra_song_sp_T.dot(testi) # (songs x playlists) %*% similarity weights -> songs of similar playlists score high\n cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1] # take the 300 highest-scoring songs\n\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx, songs_already) == False] # remove songs already in the playlist\n cand2 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()\n\n cand_all = pd.merge(cand1,cand2,how='outer',on='index')\n cand_all = cand_all.fillna(0)\n cand_all['pred'] = (cand_all['0_x'] + cand_all['0_y'])/2\n cand_song_idx = list(cand_all.sort_values(by=['pred'],ascending=False)[:100]['index'])\n\n ######tag######\n cand_tag = tra_tag_sp_T.dot(testi) # same procedure for tags\n cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]\n\n cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) == False][:10]\n rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]\n\n res.append({\n \"id\": test.loc[pid,'id'],\n \"songs\": cand_song_idx,\n \"tags\": rec_tag_idx\n })\n\n\n elif len(data['songs']) != 0:\n p = np.zeros((707989,1))\n p[data['songs']] = 1\n\n tra_song = data_all.iloc[model_knn_song25.kneighbors(p.T)[1][0]]\n row = np.repeat(range(25), tra_song['num_songs']) # repeat each playlist index once per song it contains\n col = [song for songs in tra_song['songs'] for song in songs] # flatten out the song ids\n dat = np.repeat(1, tra_song['num_songs'].sum()) # a 1 for every (playlist, song) entry\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_songs)) # build the csr_matrix\n tra_song_sp_T = tra_song_sp.T.tocsr()\n\n # tra_tag = data_all.iloc[model_knn_tag25.kneighbors(pp.T)[1][0]]\n row = np.repeat(range(25), tra_song['num_tags'])\n col = [tag for tags in tra_song['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_song['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n\n tra_tim = times_sp[model_knn_times25.kneighbors(tim_test[i:(i+1)])[1][0]]\n tra_GEN = GEN_sp[model_knn_GEN25.kneighbors(GEN_test[i:(i+1)])[1][0]]\n tra_ART = ART_sp[model_knn_ART25.kneighbors(ART_test[i:(i+1)])[1][0]]\n tra_title_gnr = title_gnr[model_knn_title_gnr25.kneighbors(test_title_gnr[i:(i+1)])[1][0]]\n\n songs_already = data[\"songs\"]\n tags_already = data[\"tags_id\"]\n\n test_song = cosine_similarity(tra_song_sp,p.T)\n test_tim = cosine_similarity(tra_tim,tim_test[i:(i+1)])\n 
test_GEN = cosine_similarity(tra_GEN,GEN_test[i:(i+1)])\n test_ART = cosine_similarity(tra_ART,ART_test[i:(i+1)])\n test_title_genre = cosine_similarity(tra_title_gnr,test_title_gnr[i:(i+1)])\n\n testi = test_song*test_title_genre*test_tim*test_GEN * test_ART\n\n cand_song = tra_song_sp_T.dot(testi) # 행에는 노래 열에는 유저 정보 %*% 유사한 유저 -> 유사한 노래에 대하여 높은 값 나옴\n cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1] # 값이 높은 상위 120개 노래 추출\n\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx, songs_already) == False] # 중복제거\n cand1 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()\n ####### 40 ####################################################\n tra_song = data_all.iloc[model_knn_song40.kneighbors(p.T)[1][0]]\n row = np.repeat(range(40), tra_song['num_songs']) # User Index 별 노래 개수만큼 만듦\n col = [song for songs in tra_song['songs'] for song in songs] # Song dic number 추출\n dat = np.repeat(1, tra_song['num_songs'].sum()) # User별 Song이 있는 부분에 1을 넣기위해 1과 전체 노래 개수만큼 만듦\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_songs)) # csr_matrix 제작\n tra_song_sp_T = tra_song_sp.T.tocsr()\n\n row = np.repeat(range(40), tra_song['num_tags'])\n col = [tag for tags in tra_song['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_song['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(40, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n\n tra_tim = times_sp[model_knn_times40.kneighbors(tim_test[i:(i+1)])[1][0]]\n tra_GEN = GEN_sp[model_knn_GEN40.kneighbors(GEN_test[i:(i+1)])[1][0]]\n tra_ART = ART_sp[model_knn_ART40.kneighbors(ART_test[i:(i+1)])[1][0]]\n tra_title_gnr = title_gnr[model_knn_title_gnr40.kneighbors(test_title_gnr[i:(i+1)])[1][0]]\n\n test_song = cosine_similarity(tra_song_sp,p.T)\n test_tim = cosine_similarity(tra_tim,tim_test[i:(i+1)])\n test_GEN = cosine_similarity(tra_GEN,GEN_test[i:(i+1)])\n test_ART = cosine_similarity(tra_ART,ART_test[i:(i+1)])\n test_title_genre = cosine_similarity(tra_title_gnr,test_title_gnr[i:(i+1)])\n\n testi = test_song * test_title_genre * test_tim * test_GEN * test_ART\n\n cand_song = tra_song_sp_T.dot(testi) # 행에는 노래 열에는 유저 정보 %*% 유사한 유저 -> 유사한 노래에 대하여 높은 값 나옴\n cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1] # 값이 높은 상위 120개 노래 추출\n\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx, songs_already) == False] # 중복제거\n cand2 = pd.DataFrame(cand_song).iloc[cand_song_idx].reset_index()\n\n cand_all = pd.merge(cand1,cand2,how='outer',on='index')\n cand_all = cand_all.fillna(0)\n cand_all['pred'] = (cand_all['0_x'] + cand_all['0_y'])/2\n cand_song_idx = list(cand_all.sort_values(by=['pred'],ascending=False)[:100]['index'])\n\n #######tag########\n cand_tag = tra_tag_sp_T.dot(testi) # 똑같은 작업 실시\n cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]\n\n cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) == False][:10]\n rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]\n\n res.append({\n \"id\": test.loc[pid,'id'],\n \"songs\": cand_song_idx,\n \"tags\": rec_tag_idx\n })\n\n\n elif len(data['tags_id']) !=0:\n p = np.zeros((n_tags,1))\n p[data['tags_id']] = 1\n\n tra_tag = data_all.iloc[model_knn_tag25.kneighbors(p.T)[1][0]]\n row = np.repeat(range(25), tra_tag['num_songs']) # User Index 별 노래 개수만큼 만듦\n col = [song for songs in tra_tag['songs'] for song in songs] # Song dic number 추출\n dat = np.repeat(1, tra_tag['num_songs'].sum()) # User별 Song이 있는 부분에 1을 넣기위해 1과 전체 노래 개수만큼 만듦\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_songs)) # csr_matrix 제작\n tra_song_sp_T = 
tra_song_sp.T.tocsr()\n\n row = np.repeat(range(25), tra_tag['num_tags'])\n col = [tag for tags in tra_tag['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_tag['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(25, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n\n\n songs_already = data[\"songs\"]\n tags_already = data[\"tags_id\"]\n\n testi = cosine_similarity(tra_tag_sp,pp.T)\n\n if len(data['plylst_title']) != 0 :\n tra_title_gnr = title_tdm[model_knn_title25.kneighbors(title_ts[i:(i+1)])[1][0]]\n testi_title = cosine_similarity(tra_title_gnr,title_ts[i:(i+1)])\n testi = testi * testi_title\n\n cand_song = tra_song_sp_T.dot(testi) # 행에는 노래 열에는 유저 정보 %*% 유사한 유저 -> 유사한 노래에 대하여 높은 값 나옴\n cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1] # 값이 높은 상위 120개 노래 추출\n\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx, songs_already) == False][:100] # 중복되는 노래 있는지 확인하고 100개 추출\n\n cand_tag = tra_tag_sp_T.dot(testi) # 똑같은 작업 실시\n cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]\n\n cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) == False][:10]\n rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]\n\n res.append({\n \"id\": test.loc[pid,'id'],\n \"songs\": list(cand_song_idx),\n \"tags\": rec_tag_idx\n })\n\n else :\n cand_song = []\n for li in data_all.iloc[model_knn_title25.kneighbors(title_ts[i:(i+1)])[1][0]].songs.to_list():\n for j in li:\n cand_song.append(j)\n\n cand_tag = []\n for li in data_all.iloc[model_knn_title25.kneighbors(title_ts[i:(i+1)])[1][0]].tags.to_list():\n for j in li:\n cand_tag.append(j)\n\n cand_song_idx = list(pd.DataFrame(cand_song)[0].value_counts()[:100].index)\n rec_tag_idx = list(pd.DataFrame(cand_tag)[0].value_counts()[:10].index)\n\n res.append({\n \"id\": test.loc[pid,'id'],\n \"songs\": cand_song_idx,\n \"tags\": rec_tag_idx\n })\n\n for i in range(len(res)):\n if len(res[i]['songs']) != 100:\n print('song 에서 {}번째 오류 발생'.format(i))\n\n if len(res[i]['tags']) != 10:\n print('tag 에서 {}번째 오류 발생'.format(i))\n\n rec = []\n for i in range(len(res)):\n rec.append({\n \"id\": res[i]['id'],\n \"songs\": list(res[i]['songs']),\n \"tags\": res[i]['tags']\n })\n\n result1 = pd.DataFrame(rec)\n\n model_knn_song = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=50, n_jobs=-1)\n model_knn_tag = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=50, n_jobs=-1)\n model_knn_title = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=50, n_jobs=-1)\n model_knn_title_gnr = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=50, n_jobs=-1)\n model_knn_times = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=50, n_jobs=-1)\n model_knn_GEN = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=50, n_jobs=-1)\n model_knn_ART = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=50, n_jobs=-1)\n\n model_knn_song.fit(song_sp)\n model_knn_tag.fit(tag_sp)\n model_knn_title.fit(title_sp)\n model_knn_title_gnr.fit(title_gnr)\n model_knn_times.fit(times_sp)\n model_knn_GEN.fit(GEN_sp)\n model_knn_ART.fit(ART_sp)\n\n res2 = []\n for i in tqdm_notebook([1960, 6361, 8705, 9310, 10498]):\n data = test.iloc[i]\n pid = i\n\n if len(data['songs']) != 0 and len(data['tags_id']) != 0:\n p = np.zeros((707989,1))\n p[data['songs']] = 1\n\n pp = np.zeros((n_tags,1))\n pp[data['tags_id']] = 1\n\n tra_song = data_all.iloc[model_knn_song.kneighbors(p.T)[1][0]]\n row = np.repeat(range(50), tra_song['num_songs']) # User Index 별 노래 개수만큼 만듦\n col = 
[song for songs in tra_song['songs'] for song in songs] # Song dic number 추출\n dat = np.repeat(1, tra_song['num_songs'].sum()) # User별 Song이 있는 부분에 1을 넣기위해 1과 전체 노래 개수만큼 만듦\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_songs)) # csr_matrix 제작\n tra_song_sp_T = tra_song_sp.T.tocsr()\n\n tra_tag = data_all.iloc[model_knn_tag.kneighbors(pp.T)[1][0]]\n row = np.repeat(range(50), tra_tag['num_tags'])\n col = [tag for tags in tra_tag['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_tag['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n\n tra_tim = times_sp[model_knn_times.kneighbors(tim_test[i:(i+1)])[1][0]]\n tra_GEN = GEN_sp[model_knn_GEN.kneighbors(GEN_test[i:(i+1)])[1][0]]\n tra_title_gnr = title_gnr[model_knn_title_gnr.kneighbors(test_title_gnr[i:(i+1)])[1][0]]\n\n songs_already = data[\"songs\"]\n tags_already = data[\"tags_id\"]\n\n test_song = cosine_similarity(tra_song_sp,p.T)\n test_tag = cosine_similarity(tra_tag_sp,pp.T)\n\n test_tim = cosine_similarity(tra_tim,tim_test[i:(i+1)])\n test_GEN = cosine_similarity(tra_GEN,GEN_test[i:(i+1)])\n test_title_genre = cosine_similarity(tra_title_gnr,test_title_gnr[i:(i+1)])\n\n testi = test_song * test_tag * test_title_genre * test_GEN\n\n cand_song = tra_song_sp_T.dot(testi) # 행에는 노래 열에는 유저 정보 %*% 유사한 유저 -> 유사한 노래에 대하여 높은 값 나옴\n cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1] # 값이 높은 상위 120개 노래 추출\n\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx, songs_already) == False][:100] # 중복되는 노래 있는지 확인하고 100개 추출\n\n cand_tag = tra_tag_sp_T.dot(testi) # 똑같은 작업 실시\n cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]\n\n cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) == False][:10]\n rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]\n\n res2.append({\n \"id\": test.loc[pid,'id'],\n \"songs\": cand_song_idx,\n \"tags\": rec_tag_idx\n })\n\n\n elif len(data['songs']) != 0:\n p = np.zeros((707989,1))\n p[data['songs']] = 1\n\n tra_song = data_all.iloc[model_knn_song.kneighbors(p.T)[1][0]]\n row = np.repeat(range(50), tra_song['num_songs']) # User Index 별 노래 개수만큼 만듦\n col = [song for songs in tra_song['songs'] for song in songs] # Song dic number 추출\n dat = np.repeat(1, tra_song['num_songs'].sum()) # User별 Song이 있는 부분에 1을 넣기위해 1과 전체 노래 개수만큼 만듦\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_songs)) # csr_matrix 제작\n tra_song_sp_T = tra_song_sp.T.tocsr()\n\n row = np.repeat(range(50), tra_song['num_tags'])\n col = [tag for tags in tra_song['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_song['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n\n songs_already = data[\"songs\"]\n tags_already = data[\"tags_id\"]\n\n tra_tim = times_sp[model_knn_times.kneighbors(tim_test[i:(i+1)])[1][0]]\n tra_GEN = GEN_sp[model_knn_GEN.kneighbors(GEN_test[i:(i+1)])[1][0]]\n tra_title_gnr = title_gnr[model_knn_title_gnr.kneighbors(test_title_gnr[i:(i+1)])[1][0]]\n\n test_song = cosine_similarity(tra_song_sp,p.T)\n\n test_tim = cosine_similarity(tra_tim,tim_test[i:(i+1)])\n test_GEN = cosine_similarity(tra_GEN,GEN_test[i:(i+1)])\n test_title_genre = cosine_similarity(tra_title_gnr,test_title_gnr[i:(i+1)])\n testi = test_song*test_title_genre*test_tim*test_GEN\n\n cand_song = tra_song_sp_T.dot(testi) # 행에는 노래 열에는 유저 정보 %*% 유사한 유저 -> 유사한 노래에 대하여 높은 값 나옴\n cand_song_idx = cand_song.reshape(-1).argsort()[-200:][::-1] # 값이 높은 상위 120개 노래 추출\n\n 
cand_song_idx = cand_song_idx[np.isin(cand_song_idx, songs_already) == False][:100] # 중복되는 노래 있는지 확인하고 100개 추출\n\n cand_tag = tra_tag_sp_T.dot(testi) # 똑같은 작업 실시\n cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]\n\n cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) == False][:10]\n rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]\n\n res2.append({\n \"id\": test.loc[pid,'id'],\n \"songs\": cand_song_idx,\n \"tags\": rec_tag_idx\n })\n\n elif len(data['tags_id']) !=0:\n p = np.zeros((n_tags,1))\n p[data['tags_id']] = 1\n\n tra_tag = data_all.iloc[model_knn_tag.kneighbors(p.T)[1][0]]\n row = np.repeat(range(50), tra_tag['num_songs']) # User Index 별 노래 개수만큼 만듦\n col = [song for songs in tra_tag['songs'] for song in songs] # Song dic number 추출\n dat = np.repeat(1, tra_tag['num_songs'].sum()) # User별 Song이 있는 부분에 1을 넣기위해 1과 전체 노래 개수만큼 만듦\n tra_song_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_songs)) # csr_matrix 제작\n tra_song_sp_T = tra_song_sp.T.tocsr()\n\n row = np.repeat(range(50), tra_tag['num_tags'])\n col = [tag for tags in tra_tag['tags_id'] for tag in tags]\n dat = np.repeat(1, tra_tag['num_tags'].sum())\n tra_tag_sp = spr.csr_matrix((dat, (row, col)), shape=(50, n_tags))\n tra_tag_sp_T = tra_tag_sp.T.tocsr()\n\n\n songs_already = data[\"songs\"]\n tags_already = data[\"tags_id\"]\n\n testi = cosine_similarity(tra_tag_sp,pp.T)\n\n if len(data['plylst_title']) != 0 :\n tra_title_gnr = title_tdm[model_knn_title.kneighbors(title_ts[i:(i+1)])[1][0]]\n testi_title = cosine_similarity(tra_title_gnr,title_ts[i:(i+1)])\n testi = testi * testi_title\n\n cand_song = tra_song_sp_T.dot(testi) # 행에는 노래 열에는 유저 정보 %*% 유사한 유저 -> 유사한 노래에 대하여 높은 값 나옴\n cand_song_idx = cand_song.reshape(-1).argsort()[-300:][::-1] # 값이 높은 상위 120개 노래 추출\n\n cand_song_idx = cand_song_idx[np.isin(cand_song_idx, songs_already) == False][:100] # 중복되는 노래 있는지 확인하고 100개 추출\n\n cand_tag = tra_tag_sp_T.dot(testi) # 똑같은 작업 실시\n cand_tag_idx = cand_tag.reshape(-1).argsort()[-30:][::-1]\n\n cand_tag_idx = cand_tag_idx[np.isin(cand_tag_idx, tags_already) == False][:10]\n rec_tag_idx = [tag_tid_id[i] for i in cand_tag_idx]\n\n res2.append({\n \"id\": test.loc[pid,'id'],\n \"songs\": cand_song_idx,\n \"tags\": rec_tag_idx\n })\n\n else:\n cand_song = []\n for li in data_all.iloc[model_knn_title.kneighbors(title_ts[i:(i+1)])[1][0]].songs.to_list():\n for j in li:\n cand_song.append(j)\n\n cand_tag = []\n for li in data_all.iloc[model_knn_title.kneighbors(title_ts[i:(i+1)])[1][0]].tags.to_list():\n for j in li:\n cand_tag.append(j)\n\n cand_song_idx = list(pd.DataFrame(cand_song)[0].value_counts()[:100].index)\n rec_tag_idx = list(pd.DataFrame(cand_tag)[0].value_counts()[:10].index)\n\n res2.append({\n \"id\": test.loc[pid,'id'],\n \"songs\": cand_song_idx,\n \"tags\": rec_tag_idx\n })\n\n\n pd.DataFrame(res2)\n\n rec2 = []\n for i in range(len(res2)):\n rec2.append({\n \"id\": res2[i]['id'],\n \"songs\": list(res2[i]['songs']),\n \"tags\": res2[i]['tags']\n })\n\n result2 = pd.DataFrame(rec2)['songs']\n\n n_index = [10498,6361,1960,8705,9310]\n\n result2.index = n_index\n\n result1.loc[n_index,'songs'] = result2\n\n result1['songs'].apply(len).sort_values()\n #그럼에도 채워지지 않은 6361에 대해서 상위 100곡 추천\n s = []\n for song in train.songs.tolist():\n s += song\n r1 = dict(Counter(s))\n\n r_song = sorted(r1.items(), key=lambda x: -x[1])\n r_song_top = r_song[:100] # 몇 곡 할지도 정해야 함\n\n list_song = list(dict(r_song_top).keys())\n len(list_song)\n\n sub= []\n for j in range(len(result1)) :\n 
sub.append(result1.loc[j].to_dict())\n\n sub[6361]['songs'] = list_song\n\n pd.DataFrame(sub)['songs'].apply(len).sort_values()\n write_json(sub,'final_songs.json')\n return sub\n\nif __name__ == '__main__':\n\n _data = Dataset()\n\n pre_tag.run(_data.test,_data.n_songs,_data.n_tags,_data.spr_list,_data.tag_tid_id)\n final_tags = word2vec_for_tag.run(_data.total,_data.test)\n\n final_songs = song_inference()\n result = []\n for f_songs, f_tags in zip(final_songs,final_tags):\n result.append({\n 'id':f_songs['id'],\n 'songs':f_songs['songs'],\n 'tags':f_tags['tags']\n })\n write_json(result, 'results.json')\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import requests as r
import re
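# Minimal recursive link crawler: fetch the seed page, pull href targets out of
# <a> tags with regexes, and follow each site-relative link exactly once.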
class web_scrap:
seed=""
result=""
tag_attr=[]
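    # note: these are class-level attributes, so every instance shares the same tag_attr list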
def __init__(self,seed):
self.seed=seed
self.set_tag()
self.set_attr()
self.fetch_web(self.seed)
self.crawl()
def fetch_web(self,link):
self.result=r.get(link)
self.extract_tags()
def set_tag(self):
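        # regex that matches opening <a ...> anchor tags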
self.re_tag=r"(<a [^>]+>)"
def set_attr(self):
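        # regex that captures the value of an href="..." attribute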
self.re_attr_parser=r"href\=\"([^\"]+)\""
def extract_tags(self):
title=re.findall(r"<title>([^<]+)</title>",self.result.text)
if len(title)!=0:
print(title[0])
else:
print("No Title")
tags=re.findall(self.re_tag,self.result.text)
for i in tags:
self.attr_parser(i)
def attr_parser(self,tag):
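        # keep only links that start with "/": "//host/path" is stored as "/host/path",
        # plain "/path" is kept as-is, and absolute http(s):// links are ignored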
attributes=re.findall(self.re_attr_parser,tag)
for data in attributes:
if data[0]=="/":
if data[1]=="/":
self.tag_attr.append({data[1:]:0})
else:
self.tag_attr.append({data:0})
    def crawl(self):
        for i in self.tag_attr:
            link=list(i.keys())[0]
            if not i[link]:
                i[link]=1  # mark the link as visited before fetching so it is never crawled twice
                print(link)
                self.fetch_web(self.seed+link)
print("\t HELLO WELCOME TO EMAIL SCRAPPER")
scrap=web_scrap(input("enter the link \t"))
|
normal
|
{
"blob_id": "f26dc3139413c4ed4b04484c095a433e53039cdb",
"index": 3028,
"step-1": "<mask token>\n\n\nclass web_scrap:\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, seed):\n self.seed = seed\n self.set_tag()\n self.set_attr()\n self.fetch_web(self.seed)\n self.crawl()\n\n def fetch_web(self, link):\n self.result = r.get(link)\n self.extract_tags()\n\n def set_tag(self):\n self.re_tag = '(<a [^>]+>)'\n\n def set_attr(self):\n self.re_attr_parser = 'href\\\\=\\\\\"([^\\\\\"]+)\\\\\"'\n\n def extract_tags(self):\n title = re.findall('<title>([^<]+)</title>', self.result.text)\n if len(title) != 0:\n print(title[0])\n else:\n print('No Title')\n tags = re.findall(self.re_tag, self.result.text)\n for i in tags:\n self.attr_parser(i)\n\n def attr_parser(self, tag):\n attributes = re.findall(self.re_attr_parser, tag)\n for data in attributes:\n if data[0] == '/':\n if data[1] == '/':\n self.tag_attr.append({data[1:]: 0})\n else:\n self.tag_attr.append({data: 0})\n\n def crawl(self):\n for i in self.tag_attr:\n link = list(i.keys())[0]\n if not i[link]:\n print(link)\n self.fetch_web(self.seed + link)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass web_scrap:\n seed = ''\n result = ''\n tag_attr = []\n\n def __init__(self, seed):\n self.seed = seed\n self.set_tag()\n self.set_attr()\n self.fetch_web(self.seed)\n self.crawl()\n\n def fetch_web(self, link):\n self.result = r.get(link)\n self.extract_tags()\n\n def set_tag(self):\n self.re_tag = '(<a [^>]+>)'\n\n def set_attr(self):\n self.re_attr_parser = 'href\\\\=\\\\\"([^\\\\\"]+)\\\\\"'\n\n def extract_tags(self):\n title = re.findall('<title>([^<]+)</title>', self.result.text)\n if len(title) != 0:\n print(title[0])\n else:\n print('No Title')\n tags = re.findall(self.re_tag, self.result.text)\n for i in tags:\n self.attr_parser(i)\n\n def attr_parser(self, tag):\n attributes = re.findall(self.re_attr_parser, tag)\n for data in attributes:\n if data[0] == '/':\n if data[1] == '/':\n self.tag_attr.append({data[1:]: 0})\n else:\n self.tag_attr.append({data: 0})\n\n def crawl(self):\n for i in self.tag_attr:\n link = list(i.keys())[0]\n if not i[link]:\n print(link)\n self.fetch_web(self.seed + link)\n\n\nprint('\\t HELLO WELCOME TO EMAIL SCRAPPER')\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass web_scrap:\n seed = ''\n result = ''\n tag_attr = []\n\n def __init__(self, seed):\n self.seed = seed\n self.set_tag()\n self.set_attr()\n self.fetch_web(self.seed)\n self.crawl()\n\n def fetch_web(self, link):\n self.result = r.get(link)\n self.extract_tags()\n\n def set_tag(self):\n self.re_tag = '(<a [^>]+>)'\n\n def set_attr(self):\n self.re_attr_parser = 'href\\\\=\\\\\"([^\\\\\"]+)\\\\\"'\n\n def extract_tags(self):\n title = re.findall('<title>([^<]+)</title>', self.result.text)\n if len(title) != 0:\n print(title[0])\n else:\n print('No Title')\n tags = re.findall(self.re_tag, self.result.text)\n for i in tags:\n self.attr_parser(i)\n\n def attr_parser(self, tag):\n attributes = re.findall(self.re_attr_parser, tag)\n for data in attributes:\n if data[0] == '/':\n if data[1] == '/':\n self.tag_attr.append({data[1:]: 0})\n else:\n self.tag_attr.append({data: 0})\n\n def crawl(self):\n for i in self.tag_attr:\n link = list(i.keys())[0]\n if not i[link]:\n print(link)\n self.fetch_web(self.seed + link)\n\n\nprint('\\t HELLO WELCOME TO EMAIL SCRAPPER')\nscrap = web_scrap(input('enter the link \\t'))\n",
"step-4": "import requests as r\nimport re\n\n\nclass web_scrap:\n seed = ''\n result = ''\n tag_attr = []\n\n def __init__(self, seed):\n self.seed = seed\n self.set_tag()\n self.set_attr()\n self.fetch_web(self.seed)\n self.crawl()\n\n def fetch_web(self, link):\n self.result = r.get(link)\n self.extract_tags()\n\n def set_tag(self):\n self.re_tag = '(<a [^>]+>)'\n\n def set_attr(self):\n self.re_attr_parser = 'href\\\\=\\\\\"([^\\\\\"]+)\\\\\"'\n\n def extract_tags(self):\n title = re.findall('<title>([^<]+)</title>', self.result.text)\n if len(title) != 0:\n print(title[0])\n else:\n print('No Title')\n tags = re.findall(self.re_tag, self.result.text)\n for i in tags:\n self.attr_parser(i)\n\n def attr_parser(self, tag):\n attributes = re.findall(self.re_attr_parser, tag)\n for data in attributes:\n if data[0] == '/':\n if data[1] == '/':\n self.tag_attr.append({data[1:]: 0})\n else:\n self.tag_attr.append({data: 0})\n\n def crawl(self):\n for i in self.tag_attr:\n link = list(i.keys())[0]\n if not i[link]:\n print(link)\n self.fetch_web(self.seed + link)\n\n\nprint('\\t HELLO WELCOME TO EMAIL SCRAPPER')\nscrap = web_scrap(input('enter the link \\t'))\n",
"step-5": "import requests as r\r\nimport re\r\nclass web_scrap:\r\n seed=\"\"\r\n result=\"\"\r\n tag_attr=[]\r\n \r\n def __init__(self,seed):\r\n self.seed=seed\r\n self.set_tag()\r\n self.set_attr()\r\n self.fetch_web(self.seed)\r\n self.crawl() \r\n\r\n\r\n def fetch_web(self,link):\r\n self.result=r.get(link)\r\n self.extract_tags()\r\n \r\n def set_tag(self):\r\n self.re_tag=r\"(<a [^>]+>)\"\r\n\r\n def set_attr(self):\r\n self.re_attr_parser=r\"href\\=\\\"([^\\\"]+)\\\"\"\r\n\r\n def extract_tags(self):\r\n title=re.findall(r\"<title>([^<]+)</title>\",self.result.text)\r\n if len(title)!=0:\r\n print(title[0])\r\n else:\r\n print(\"No Title\")\r\n tags=re.findall(self.re_tag,self.result.text)\r\n for i in tags:\r\n self.attr_parser(i)\r\n\r\n def attr_parser(self,tag):\r\n attributes=re.findall(self.re_attr_parser,tag)\r\n for data in attributes:\r\n if data[0]==\"/\":\r\n if data[1]==\"/\":\r\n self.tag_attr.append({data[1:]:0})\r\n else:\r\n self.tag_attr.append({data:0})\r\n \r\n def crawl(self):\r\n for i in self.tag_attr:\r\n link=list(i.keys())[0]\r\n if(not i[link]):\r\n print(link)\r\n self.fetch_web(self.seed+link)\r\n \r\n \r\n \r\nprint(\"\\t HELLO WELCOME TO EMAIL SCRAPPER\")\r\n\r\nscrap=web_scrap(input(\"enter the link \\t\"))\r\n",
"step-ids": [
8,
10,
11,
12,
13
]
}
|
[
8,
10,
11,
12,
13
] |
n = int(input("Please input the number of 1's and 0's you want to print:"))
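# print n space-separated digits that alternate 1 and 0, starting with 1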
for i in range(1, n + 1):
    if i % 2 == 1:
        print("1 ", end="")
    else:
        print("0 ", end="")
|
normal
|
{
"blob_id": "bd96b31c5de2f0ad4bbc28c876b86ec238db3184",
"index": 9108,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(1, n + 1):\n if i % 2 == 1:\n print('1 ', end='')\n else:\n print('0 ', end='')\n",
"step-3": "n = int(input(\"Please input the number of 1's and 0's you want to print:\"))\nfor i in range(1, n + 1):\n if i % 2 == 1:\n print('1 ', end='')\n else:\n print('0 ', end='')\n",
"step-4": "n = int(input(\"Please input the number of 1's and 0's you want to print:\"))\n\nfor i in range (1, n+1):\n if i%2 == 1:\n print (\"1 \", end = \"\")\n else:\n print (\"0 \", end = \"\")",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_logger():
return current_app.logger
<|reserved_special_token_0|>
def info(msg, *args, **kwargs):
get_logger().info(msg, *args, **kwargs)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_logger():
return current_app.logger
def debug(msg, *args, **kwargs):
get_logger().debug(msg, *args, **kwargs)
def info(msg, *args, **kwargs):
get_logger().info(msg, *args, **kwargs)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_logger():
return current_app.logger
def debug(msg, *args, **kwargs):
get_logger().debug(msg, *args, **kwargs)
def info(msg, *args, **kwargs):
get_logger().info(msg, *args, **kwargs)
<|reserved_special_token_0|>
def error(msg, *args, **kwargs):
get_logger().error(msg, *args, **kwargs)
<|reserved_special_token_1|>
from flask import current_app
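# thin wrappers around Flask's application logger so call sites don't touch current_app directly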
def get_logger():
return current_app.logger
def debug(msg, *args, **kwargs):
get_logger().debug(msg, *args, **kwargs)
def info(msg, *args, **kwargs):
get_logger().info(msg, *args, **kwargs)
def warn(msg, *args, **kwargs):
get_logger().warning(msg, *args, **kwargs)
def error(msg, *args, **kwargs):
get_logger().error(msg, *args, **kwargs)
|
flexible
|
{
"blob_id": "355e2799e89dfea4f775480ea7d829a075f92473",
"index": 4241,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_logger():\n return current_app.logger\n\n\n<mask token>\n\n\ndef info(msg, *args, **kwargs):\n get_logger().info(msg, *args, **kwargs)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef get_logger():\n return current_app.logger\n\n\ndef debug(msg, *args, **kwargs):\n get_logger().debug(msg, *args, **kwargs)\n\n\ndef info(msg, *args, **kwargs):\n get_logger().info(msg, *args, **kwargs)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef get_logger():\n return current_app.logger\n\n\ndef debug(msg, *args, **kwargs):\n get_logger().debug(msg, *args, **kwargs)\n\n\ndef info(msg, *args, **kwargs):\n get_logger().info(msg, *args, **kwargs)\n\n\n<mask token>\n\n\ndef error(msg, *args, **kwargs):\n get_logger().error(msg, *args, **kwargs)\n",
"step-5": "from flask import current_app\n\n\ndef get_logger():\n return current_app.logger\n\n\ndef debug(msg, *args, **kwargs):\n get_logger().debug(msg, *args, **kwargs)\n\n\ndef info(msg, *args, **kwargs):\n get_logger().info(msg, *args, **kwargs)\n\n\ndef warn(msg, *args, **kwargs):\n get_logger().warning(msg, *args, **kwargs)\n\n\ndef error(msg, *args, **kwargs):\n get_logger().error(msg, *args, **kwargs)\n",
"step-ids": [
0,
2,
3,
4,
6
]
}
|
[
0,
2,
3,
4,
6
] |
# -*- coding: utf-8 -*-
"""
Created on Mon Jan 25 12:07:32 2021
@author: yashv
"""
import numpy as np
X= [0.7, 1.5]
Y= [3.9,0.2]
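# note: f() below is a sigmoid, so predictions lie in (0, 1); the target 3.9 can never be matched exactly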
def f(w,b,x): #sigmoid logistic function
return 1.0/(1.0 + np.exp(-(w*x +b)))
def error(w,b): #loss function
err=0.0
for x,y in zip(X,Y):
fx= f(w,b,x)
err += 0.5 * (fx - y) **2
return err
def grad_b(w,b,x,y):
fx= f(w,b,x)
return (fx - y)* fx * (1-fx)
def grad_w(w,b,x,y):
fx= f(w,b,x)
return (fx - y)* fx * (1-fx) * x
def do_gradient_descent():
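    # full-batch gradient descent: sum the gradients over all (x, y) pairs, then take one step per epoch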
w, b, eta, max_epochs = 10, 10, 6.0, 1000
for i in range(max_epochs):
dw, db = 0,0
for x,y in zip(X,Y):
dw += grad_w(w,b,x,y)
db += grad_b(w,b,x,y)
w = w - eta * dw
        b = b - eta * db  # fix: update b with its own gradient db, not w's gradient dw
print(w,b)
print("e:",error(w,b))
do_gradient_descent()
|
normal
|
{
"blob_id": "2387856757ad1c3ff911cf2a7537ca6df7786997",
"index": 9244,
"step-1": "<mask token>\n\n\ndef f(w, b, x):\n return 1.0 / (1.0 + np.exp(-(w * x + b)))\n\n\ndef error(w, b):\n err = 0.0\n for x, y in zip(X, Y):\n fx = f(w, b, x)\n err += 0.5 * (fx - y) ** 2\n return err\n\n\ndef grad_b(w, b, x, y):\n fx = f(w, b, x)\n return (fx - y) * fx * (1 - fx)\n\n\ndef grad_w(w, b, x, y):\n fx = f(w, b, x)\n return (fx - y) * fx * (1 - fx) * x\n\n\ndef do_gradient_descent():\n w, b, eta, max_epochs = 10, 10, 6.0, 1000\n for i in range(max_epochs):\n dw, db = 0, 0\n for x, y in zip(X, Y):\n dw += grad_w(w, b, x, y)\n db += grad_b(w, b, x, y)\n w = w - eta * dw\n b = b - eta * dw\n print(w, b)\n print('e:', error(w, b))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef f(w, b, x):\n return 1.0 / (1.0 + np.exp(-(w * x + b)))\n\n\ndef error(w, b):\n err = 0.0\n for x, y in zip(X, Y):\n fx = f(w, b, x)\n err += 0.5 * (fx - y) ** 2\n return err\n\n\ndef grad_b(w, b, x, y):\n fx = f(w, b, x)\n return (fx - y) * fx * (1 - fx)\n\n\ndef grad_w(w, b, x, y):\n fx = f(w, b, x)\n return (fx - y) * fx * (1 - fx) * x\n\n\ndef do_gradient_descent():\n w, b, eta, max_epochs = 10, 10, 6.0, 1000\n for i in range(max_epochs):\n dw, db = 0, 0\n for x, y in zip(X, Y):\n dw += grad_w(w, b, x, y)\n db += grad_b(w, b, x, y)\n w = w - eta * dw\n b = b - eta * dw\n print(w, b)\n print('e:', error(w, b))\n\n\ndo_gradient_descent()\n",
"step-3": "<mask token>\nX = [0.7, 1.5]\nY = [3.9, 0.2]\n\n\ndef f(w, b, x):\n return 1.0 / (1.0 + np.exp(-(w * x + b)))\n\n\ndef error(w, b):\n err = 0.0\n for x, y in zip(X, Y):\n fx = f(w, b, x)\n err += 0.5 * (fx - y) ** 2\n return err\n\n\ndef grad_b(w, b, x, y):\n fx = f(w, b, x)\n return (fx - y) * fx * (1 - fx)\n\n\ndef grad_w(w, b, x, y):\n fx = f(w, b, x)\n return (fx - y) * fx * (1 - fx) * x\n\n\ndef do_gradient_descent():\n w, b, eta, max_epochs = 10, 10, 6.0, 1000\n for i in range(max_epochs):\n dw, db = 0, 0\n for x, y in zip(X, Y):\n dw += grad_w(w, b, x, y)\n db += grad_b(w, b, x, y)\n w = w - eta * dw\n b = b - eta * dw\n print(w, b)\n print('e:', error(w, b))\n\n\ndo_gradient_descent()\n",
"step-4": "<mask token>\nimport numpy as np\nX = [0.7, 1.5]\nY = [3.9, 0.2]\n\n\ndef f(w, b, x):\n return 1.0 / (1.0 + np.exp(-(w * x + b)))\n\n\ndef error(w, b):\n err = 0.0\n for x, y in zip(X, Y):\n fx = f(w, b, x)\n err += 0.5 * (fx - y) ** 2\n return err\n\n\ndef grad_b(w, b, x, y):\n fx = f(w, b, x)\n return (fx - y) * fx * (1 - fx)\n\n\ndef grad_w(w, b, x, y):\n fx = f(w, b, x)\n return (fx - y) * fx * (1 - fx) * x\n\n\ndef do_gradient_descent():\n w, b, eta, max_epochs = 10, 10, 6.0, 1000\n for i in range(max_epochs):\n dw, db = 0, 0\n for x, y in zip(X, Y):\n dw += grad_w(w, b, x, y)\n db += grad_b(w, b, x, y)\n w = w - eta * dw\n b = b - eta * dw\n print(w, b)\n print('e:', error(w, b))\n\n\ndo_gradient_descent()\n",
"step-5": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Mon Jan 25 12:07:32 2021\r\n\r\n@author: yashv\r\n\"\"\"\r\n\r\nimport numpy as np\r\n\r\nX= [0.7, 1.5]\r\nY= [3.9,0.2]\r\n\r\ndef f(w,b,x): #sigmoid logistic function\r\n return 1.0/(1.0 + np.exp(-(w*x +b)))\r\n\r\ndef error(w,b): #loss function\r\n err=0.0\r\n for x,y in zip(X,Y):\r\n fx= f(w,b,x)\r\n err += 0.5 * (fx - y) **2\r\n return err\r\n\r\ndef grad_b(w,b,x,y):\r\n fx= f(w,b,x)\r\n return (fx - y)* fx * (1-fx) \r\n\r\ndef grad_w(w,b,x,y):\r\n fx= f(w,b,x)\r\n return (fx - y)* fx * (1-fx) * x\r\n\r\ndef do_gradient_descent():\r\n w, b, eta, max_epochs = 10, 10, 6.0, 1000\r\n for i in range(max_epochs):\r\n dw, db = 0,0 \r\n for x,y in zip(X,Y):\r\n dw += grad_w(w,b,x,y)\r\n db += grad_b(w,b,x,y)\r\n w = w - eta * dw\r\n b = b - eta * dw\r\n print(w,b)\r\n print(\"e:\",error(w,b))\r\n \r\ndo_gradient_descent()\r\n\r\n\r\n\r\n",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
<|reserved_special_token_0|>
def standard_env() ->Env:
"""An environment with some scheme standard procedures"""
env = Env()
env.update(vars(math))
env.update({'+': op.add, '-': op.sub, '*': op.mul, '/': op.truediv, '>':
        op.gt, '<': op.lt, '>=': op.ge, '<=': op.le, '=': op.eq, 'abs': abs,
'append': op.add, 'apply': lambda proc, args: proc(*args), 'begin':
lambda *x: x[-1], 'car': lambda x: x[0], 'cdr': lambda x: x[1:],
'cons': lambda x, y: [x] + y, 'eq?': op.is_, 'expt': pow, 'equal?':
op.eq, 'length': len, 'list': lambda *x: List(x), 'list?': lambda x:
isinstance(x, List), 'map': map, 'max': max, 'min': min, 'not': op.
not_, 'null?': lambda x: x == [], 'number?': lambda x: isinstance(x,
Number), 'print': print, 'procedure?': callable, 'round': round,
'symbol?': lambda x: isinstance(x, Symbol)})
return env
<|reserved_special_token_0|>
def eval(x: Exp, env=global_env) ->Exp:
"""Evaluate an expression in an environment."""
if isinstance(x, Symbol):
return env[x]
elif not isinstance(x, List):
return x
elif x[0] == 'if':
_, test, conseq, alt = x
exp = conseq if eval(test, env) else alt
return eval(exp, env)
elif x[0] == 'define':
_, symbol, exp = x
env[symbol] = eval(exp, env)
else:
proc = eval(x[0], env)
args = [eval(arg, env) for arg in x[1:]]
return proc(*args)
def tokenize(chars: str) ->list:
"""convert a string of characters into a list of tokens"""
return chars.replace('(', ' ( ').replace(')', ' ) ').split()
def parse(program: str) ->Exp:
"""Read a scheme expression from a string"""
return read_from_tokens(tokenize(program))
<|reserved_special_token_0|>
def atom(token: str) ->Atom:
"""Numbers become numbers; every other token is a symbol"""
try:
return int(token)
except ValueError:
try:
return float(token)
except ValueError:
return Symbol(token)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def standard_env() ->Env:
"""An environment with some scheme standard procedures"""
env = Env()
env.update(vars(math))
env.update({'+': op.add, '-': op.sub, '*': op.mul, '/': op.truediv, '>':
        op.gt, '<': op.lt, '>=': op.ge, '<=': op.le, '=': op.eq, 'abs': abs,
'append': op.add, 'apply': lambda proc, args: proc(*args), 'begin':
lambda *x: x[-1], 'car': lambda x: x[0], 'cdr': lambda x: x[1:],
'cons': lambda x, y: [x] + y, 'eq?': op.is_, 'expt': pow, 'equal?':
op.eq, 'length': len, 'list': lambda *x: List(x), 'list?': lambda x:
isinstance(x, List), 'map': map, 'max': max, 'min': min, 'not': op.
not_, 'null?': lambda x: x == [], 'number?': lambda x: isinstance(x,
Number), 'print': print, 'procedure?': callable, 'round': round,
'symbol?': lambda x: isinstance(x, Symbol)})
return env
<|reserved_special_token_0|>
def eval(x: Exp, env=global_env) ->Exp:
"""Evaluate an expression in an environment."""
if isinstance(x, Symbol):
return env[x]
elif not isinstance(x, List):
return x
elif x[0] == 'if':
_, test, conseq, alt = x
exp = conseq if eval(test, env) else alt
return eval(exp, env)
elif x[0] == 'define':
_, symbol, exp = x
env[symbol] = eval(exp, env)
else:
proc = eval(x[0], env)
args = [eval(arg, env) for arg in x[1:]]
return proc(*args)
def tokenize(chars: str) ->list:
"""convert a string of characters into a list of tokens"""
return chars.replace('(', ' ( ').replace(')', ' ) ').split()
def parse(program: str) ->Exp:
"""Read a scheme expression from a string"""
return read_from_tokens(tokenize(program))
def read_from_tokens(tokens: list) ->Exp:
"""Read an expression from a sequence of tokens"""
if len(tokens) == 0:
raise SyntaxError('unexpected EOF')
token = tokens.pop(0)
if token == '(':
L = []
while tokens[0] != ')':
L.append(read_from_tokens(tokens))
tokens.pop(0)
return L
elif token == ')':
raise SyntaxError('unexpected )')
else:
return atom(token)
def atom(token: str) ->Atom:
"""Numbers become numbers; every other token is a symbol"""
try:
return int(token)
except ValueError:
try:
return float(token)
except ValueError:
return Symbol(token)
<|reserved_special_token_0|>
print(eval(parse(program)))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
Symbol = str
Number = int, float
Atom = Symbol, Number
List = list
Exp = Atom, List
Env = dict
def standard_env() ->Env:
"""An environment with some scheme standard procedures"""
env = Env()
env.update(vars(math))
env.update({'+': op.add, '-': op.sub, '*': op.mul, '/': op.truediv, '>':
        op.gt, '<': op.lt, '>=': op.ge, '<=': op.le, '=': op.eq, 'abs': abs,
'append': op.add, 'apply': lambda proc, args: proc(*args), 'begin':
lambda *x: x[-1], 'car': lambda x: x[0], 'cdr': lambda x: x[1:],
'cons': lambda x, y: [x] + y, 'eq?': op.is_, 'expt': pow, 'equal?':
op.eq, 'length': len, 'list': lambda *x: List(x), 'list?': lambda x:
isinstance(x, List), 'map': map, 'max': max, 'min': min, 'not': op.
not_, 'null?': lambda x: x == [], 'number?': lambda x: isinstance(x,
Number), 'print': print, 'procedure?': callable, 'round': round,
'symbol?': lambda x: isinstance(x, Symbol)})
return env
global_env = standard_env()
def eval(x: Exp, env=global_env) ->Exp:
"""Evaluate an expression in an environment."""
if isinstance(x, Symbol):
return env[x]
elif not isinstance(x, List):
return x
elif x[0] == 'if':
_, test, conseq, alt = x
exp = conseq if eval(test, env) else alt
return eval(exp, env)
elif x[0] == 'define':
_, symbol, exp = x
env[symbol] = eval(exp, env)
else:
proc = eval(x[0], env)
args = [eval(arg, env) for arg in x[1:]]
return proc(*args)
def tokenize(chars: str) ->list:
"""convert a string of characters into a list of tokens"""
return chars.replace('(', ' ( ').replace(')', ' ) ').split()
def parse(program: str) ->Exp:
"""Read a scheme expression from a string"""
return read_from_tokens(tokenize(program))
def read_from_tokens(tokens: list) ->Exp:
"""Read an expression from a sequence of tokens"""
if len(tokens) == 0:
raise SyntaxError('unexpected EOF')
token = tokens.pop(0)
if token == '(':
L = []
while tokens[0] != ')':
L.append(read_from_tokens(tokens))
tokens.pop(0)
return L
elif token == ')':
raise SyntaxError('unexpected )')
else:
return atom(token)
def atom(token: str) ->Atom:
"""Numbers become numbers; every other token is a symbol"""
try:
return int(token)
except ValueError:
try:
return float(token)
except ValueError:
return Symbol(token)
program = '(begin (define r 10) (* pi (* r r)))'
print(eval(parse(program)))
<|reserved_special_token_1|>
import math
import operator as op
Symbol = str
Number = int, float
Atom = Symbol, Number
List = list
Exp = Atom, List
Env = dict
def standard_env() ->Env:
"""An environment with some scheme standard procedures"""
env = Env()
env.update(vars(math))
env.update({'+': op.add, '-': op.sub, '*': op.mul, '/': op.truediv, '>':
        op.gt, '<': op.lt, '>=': op.ge, '<=': op.le, '=': op.eq, 'abs': abs,
'append': op.add, 'apply': lambda proc, args: proc(*args), 'begin':
lambda *x: x[-1], 'car': lambda x: x[0], 'cdr': lambda x: x[1:],
'cons': lambda x, y: [x] + y, 'eq?': op.is_, 'expt': pow, 'equal?':
op.eq, 'length': len, 'list': lambda *x: List(x), 'list?': lambda x:
isinstance(x, List), 'map': map, 'max': max, 'min': min, 'not': op.
not_, 'null?': lambda x: x == [], 'number?': lambda x: isinstance(x,
Number), 'print': print, 'procedure?': callable, 'round': round,
'symbol?': lambda x: isinstance(x, Symbol)})
return env
global_env = standard_env()
def eval(x: Exp, env=global_env) ->Exp:
"""Evaluate an expression in an environment."""
if isinstance(x, Symbol):
return env[x]
elif not isinstance(x, List):
return x
elif x[0] == 'if':
_, test, conseq, alt = x
exp = conseq if eval(test, env) else alt
return eval(exp, env)
elif x[0] == 'define':
_, symbol, exp = x
env[symbol] = eval(exp, env)
else:
proc = eval(x[0], env)
args = [eval(arg, env) for arg in x[1:]]
return proc(*args)
def tokenize(chars: str) ->list:
"""convert a string of characters into a list of tokens"""
return chars.replace('(', ' ( ').replace(')', ' ) ').split()
def parse(program: str) ->Exp:
"""Read a scheme expression from a string"""
return read_from_tokens(tokenize(program))
def read_from_tokens(tokens: list) ->Exp:
"""Read an expression from a sequence of tokens"""
if len(tokens) == 0:
raise SyntaxError('unexpected EOF')
token = tokens.pop(0)
if token == '(':
L = []
while tokens[0] != ')':
L.append(read_from_tokens(tokens))
tokens.pop(0)
return L
elif token == ')':
raise SyntaxError('unexpected )')
else:
return atom(token)
def atom(token: str) ->Atom:
"""Numbers become numbers; every other token is a symbol"""
try:
return int(token)
except ValueError:
try:
return float(token)
except ValueError:
return Symbol(token)
program = '(begin (define r 10) (* pi (* r r)))'
print(eval(parse(program)))
<|reserved_special_token_1|>
import math
import operator as op
Symbol = str
Number = (int, float)
Atom = (Symbol, Number)
List = list
Exp = (Atom, List)
Env = dict
def standard_env() -> Env:
"An environment with some scheme standard procedures"
env = Env()
env.update(vars(math)) # sin, cos, sqrt, pi ...
env.update({
'+':op.add, '-':op.sub, '*':op.mul, '/':op.truediv,
        '>':op.gt, '<':op.lt, '>=':op.ge, '<=':op.le, '=':op.eq,
'abs':abs,
'append':op.add,
'apply':lambda proc, args: proc(*args),
'begin':lambda *x: x[-1],
'car':lambda x: x[0],
'cdr':lambda x: x[1:],
'cons':lambda x,y: [x] + y,
'eq?':op.is_,
'expt':pow,
'equal?':op.eq,
'length':len,
'list':lambda *x: List(x),
'list?':lambda x: isinstance(x, List),
'map':map,
'max':max,
'min':min,
'not':op.not_,
'null?':lambda x: x == [],
'number?':lambda x: isinstance(x, Number),
'print':print,
'procedure?':callable,
'round':round,
'symbol?':lambda x: isinstance(x, Symbol),
})
return env
global_env = standard_env()
def eval(x: Exp, env=global_env) -> Exp:
"Evaluate an expression in an environment."
if isinstance(x, Symbol): # variable reference
return env[x]
elif not isinstance(x, List): # constant number
return x
elif x[0] == 'if': # conditional
(_, test, conseq, alt) = x
exp = (conseq if eval(test, env) else alt)
return eval(exp, env)
elif x[0] == 'define': # definition
(_, symbol, exp) = x
env[symbol] = eval(exp, env)
else: # procedure call
proc = eval(x[0], env)
args = [eval(arg, env) for arg in x[1:]]
return proc(*args)
def tokenize(chars: str) -> list:
"convert a string of characters into a list of tokens"
return chars.replace('(', ' ( ').replace(')', ' ) ').split()
def parse(program: str) -> Exp:
"Read a scheme expression from a string"
return read_from_tokens(tokenize(program))
def read_from_tokens(tokens: list) -> Exp:
"Read an expression from a sequence of tokens"
if len(tokens) == 0:
raise SyntaxError('unexpected EOF')
token = tokens.pop(0)
if token == '(':
L = []
while tokens[0] != ')':
L.append(read_from_tokens(tokens))
tokens.pop(0) # pop off ')'
return L
elif token == ')':
raise SyntaxError('unexpected )')
else:
return atom(token)
def atom(token: str) -> Atom:
"Numbers become numbers; every other token is a symbol"
try: return int(token)
except ValueError:
try: return float(token)
except ValueError:
return Symbol(token)
program = "(begin (define r 10) (* pi (* r r)))"
print(eval(parse(program)))
|
flexible
|
{
"blob_id": "88862d6bee5d83dd5f1c656a06a9dc46a5254b10",
"index": 3608,
"step-1": "<mask token>\n\n\ndef standard_env() ->Env:\n \"\"\"An environment with some scheme standard procedures\"\"\"\n env = Env()\n env.update(vars(math))\n env.update({'+': op.add, '-': op.sub, '*': op.mul, '/': op.truediv, '>':\n op.gt, '>': op.lt, '>=': op.ge, '<=': op.le, '=': op.eq, 'abs': abs,\n 'append': op.add, 'apply': lambda proc, args: proc(*args), 'begin':\n lambda *x: x[-1], 'car': lambda x: x[0], 'cdr': lambda x: x[1:],\n 'cons': lambda x, y: [x] + y, 'eq?': op.is_, 'expt': pow, 'equal?':\n op.eq, 'length': len, 'list': lambda *x: List(x), 'list?': lambda x:\n isinstance(x, List), 'map': map, 'max': max, 'min': min, 'not': op.\n not_, 'null?': lambda x: x == [], 'number?': lambda x: isinstance(x,\n Number), 'print': print, 'procedure?': callable, 'round': round,\n 'symbol?': lambda x: isinstance(x, Symbol)})\n return env\n\n\n<mask token>\n\n\ndef eval(x: Exp, env=global_env) ->Exp:\n \"\"\"Evaluate an expression in an environment.\"\"\"\n if isinstance(x, Symbol):\n return env[x]\n elif not isinstance(x, List):\n return x\n elif x[0] == 'if':\n _, test, conseq, alt = x\n exp = conseq if eval(test, env) else alt\n return eval(exp, env)\n elif x[0] == 'define':\n _, symbol, exp = x\n env[symbol] = eval(exp, env)\n else:\n proc = eval(x[0], env)\n args = [eval(arg, env) for arg in x[1:]]\n return proc(*args)\n\n\ndef tokenize(chars: str) ->list:\n \"\"\"convert a string of characters into a list of tokens\"\"\"\n return chars.replace('(', ' ( ').replace(')', ' ) ').split()\n\n\ndef parse(program: str) ->Exp:\n \"\"\"Read a scheme expression from a string\"\"\"\n return read_from_tokens(tokenize(program))\n\n\n<mask token>\n\n\ndef atom(token: str) ->Atom:\n \"\"\"Numbers become numbers; every other token is a symbol\"\"\"\n try:\n return int(token)\n except ValueError:\n try:\n return float(token)\n except ValueError:\n return Symbol(token)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef standard_env() ->Env:\n \"\"\"An environment with some scheme standard procedures\"\"\"\n env = Env()\n env.update(vars(math))\n env.update({'+': op.add, '-': op.sub, '*': op.mul, '/': op.truediv, '>':\n op.gt, '>': op.lt, '>=': op.ge, '<=': op.le, '=': op.eq, 'abs': abs,\n 'append': op.add, 'apply': lambda proc, args: proc(*args), 'begin':\n lambda *x: x[-1], 'car': lambda x: x[0], 'cdr': lambda x: x[1:],\n 'cons': lambda x, y: [x] + y, 'eq?': op.is_, 'expt': pow, 'equal?':\n op.eq, 'length': len, 'list': lambda *x: List(x), 'list?': lambda x:\n isinstance(x, List), 'map': map, 'max': max, 'min': min, 'not': op.\n not_, 'null?': lambda x: x == [], 'number?': lambda x: isinstance(x,\n Number), 'print': print, 'procedure?': callable, 'round': round,\n 'symbol?': lambda x: isinstance(x, Symbol)})\n return env\n\n\n<mask token>\n\n\ndef eval(x: Exp, env=global_env) ->Exp:\n \"\"\"Evaluate an expression in an environment.\"\"\"\n if isinstance(x, Symbol):\n return env[x]\n elif not isinstance(x, List):\n return x\n elif x[0] == 'if':\n _, test, conseq, alt = x\n exp = conseq if eval(test, env) else alt\n return eval(exp, env)\n elif x[0] == 'define':\n _, symbol, exp = x\n env[symbol] = eval(exp, env)\n else:\n proc = eval(x[0], env)\n args = [eval(arg, env) for arg in x[1:]]\n return proc(*args)\n\n\ndef tokenize(chars: str) ->list:\n \"\"\"convert a string of characters into a list of tokens\"\"\"\n return chars.replace('(', ' ( ').replace(')', ' ) ').split()\n\n\ndef parse(program: str) ->Exp:\n \"\"\"Read a scheme expression from a string\"\"\"\n return read_from_tokens(tokenize(program))\n\n\ndef read_from_tokens(tokens: list) ->Exp:\n \"\"\"Read an expression from a sequence of tokens\"\"\"\n if len(tokens) == 0:\n raise SyntaxError('unexpected EOF')\n token = tokens.pop(0)\n if token == '(':\n L = []\n while tokens[0] != ')':\n L.append(read_from_tokens(tokens))\n tokens.pop(0)\n return L\n elif token == ')':\n raise SyntaxError('unexpected )')\n else:\n return atom(token)\n\n\ndef atom(token: str) ->Atom:\n \"\"\"Numbers become numbers; every other token is a symbol\"\"\"\n try:\n return int(token)\n except ValueError:\n try:\n return float(token)\n except ValueError:\n return Symbol(token)\n\n\n<mask token>\nprint(eval(parse(program)))\n",
"step-3": "<mask token>\nSymbol = str\nNumber = int, float\nAtom = Symbol, Number\nList = list\nExp = Atom, List\nEnv = dict\n\n\ndef standard_env() ->Env:\n \"\"\"An environment with some scheme standard procedures\"\"\"\n env = Env()\n env.update(vars(math))\n env.update({'+': op.add, '-': op.sub, '*': op.mul, '/': op.truediv, '>':\n op.gt, '>': op.lt, '>=': op.ge, '<=': op.le, '=': op.eq, 'abs': abs,\n 'append': op.add, 'apply': lambda proc, args: proc(*args), 'begin':\n lambda *x: x[-1], 'car': lambda x: x[0], 'cdr': lambda x: x[1:],\n 'cons': lambda x, y: [x] + y, 'eq?': op.is_, 'expt': pow, 'equal?':\n op.eq, 'length': len, 'list': lambda *x: List(x), 'list?': lambda x:\n isinstance(x, List), 'map': map, 'max': max, 'min': min, 'not': op.\n not_, 'null?': lambda x: x == [], 'number?': lambda x: isinstance(x,\n Number), 'print': print, 'procedure?': callable, 'round': round,\n 'symbol?': lambda x: isinstance(x, Symbol)})\n return env\n\n\nglobal_env = standard_env()\n\n\ndef eval(x: Exp, env=global_env) ->Exp:\n \"\"\"Evaluate an expression in an environment.\"\"\"\n if isinstance(x, Symbol):\n return env[x]\n elif not isinstance(x, List):\n return x\n elif x[0] == 'if':\n _, test, conseq, alt = x\n exp = conseq if eval(test, env) else alt\n return eval(exp, env)\n elif x[0] == 'define':\n _, symbol, exp = x\n env[symbol] = eval(exp, env)\n else:\n proc = eval(x[0], env)\n args = [eval(arg, env) for arg in x[1:]]\n return proc(*args)\n\n\ndef tokenize(chars: str) ->list:\n \"\"\"convert a string of characters into a list of tokens\"\"\"\n return chars.replace('(', ' ( ').replace(')', ' ) ').split()\n\n\ndef parse(program: str) ->Exp:\n \"\"\"Read a scheme expression from a string\"\"\"\n return read_from_tokens(tokenize(program))\n\n\ndef read_from_tokens(tokens: list) ->Exp:\n \"\"\"Read an expression from a sequence of tokens\"\"\"\n if len(tokens) == 0:\n raise SyntaxError('unexpected EOF')\n token = tokens.pop(0)\n if token == '(':\n L = []\n while tokens[0] != ')':\n L.append(read_from_tokens(tokens))\n tokens.pop(0)\n return L\n elif token == ')':\n raise SyntaxError('unexpected )')\n else:\n return atom(token)\n\n\ndef atom(token: str) ->Atom:\n \"\"\"Numbers become numbers; every other token is a symbol\"\"\"\n try:\n return int(token)\n except ValueError:\n try:\n return float(token)\n except ValueError:\n return Symbol(token)\n\n\nprogram = '(begin (define r 10) (* pi (* r r)))'\nprint(eval(parse(program)))\n",
"step-4": "import math\nimport operator as op\nSymbol = str\nNumber = int, float\nAtom = Symbol, Number\nList = list\nExp = Atom, List\nEnv = dict\n\n\ndef standard_env() ->Env:\n \"\"\"An environment with some scheme standard procedures\"\"\"\n env = Env()\n env.update(vars(math))\n env.update({'+': op.add, '-': op.sub, '*': op.mul, '/': op.truediv, '>':\n op.gt, '>': op.lt, '>=': op.ge, '<=': op.le, '=': op.eq, 'abs': abs,\n 'append': op.add, 'apply': lambda proc, args: proc(*args), 'begin':\n lambda *x: x[-1], 'car': lambda x: x[0], 'cdr': lambda x: x[1:],\n 'cons': lambda x, y: [x] + y, 'eq?': op.is_, 'expt': pow, 'equal?':\n op.eq, 'length': len, 'list': lambda *x: List(x), 'list?': lambda x:\n isinstance(x, List), 'map': map, 'max': max, 'min': min, 'not': op.\n not_, 'null?': lambda x: x == [], 'number?': lambda x: isinstance(x,\n Number), 'print': print, 'procedure?': callable, 'round': round,\n 'symbol?': lambda x: isinstance(x, Symbol)})\n return env\n\n\nglobal_env = standard_env()\n\n\ndef eval(x: Exp, env=global_env) ->Exp:\n \"\"\"Evaluate an expression in an environment.\"\"\"\n if isinstance(x, Symbol):\n return env[x]\n elif not isinstance(x, List):\n return x\n elif x[0] == 'if':\n _, test, conseq, alt = x\n exp = conseq if eval(test, env) else alt\n return eval(exp, env)\n elif x[0] == 'define':\n _, symbol, exp = x\n env[symbol] = eval(exp, env)\n else:\n proc = eval(x[0], env)\n args = [eval(arg, env) for arg in x[1:]]\n return proc(*args)\n\n\ndef tokenize(chars: str) ->list:\n \"\"\"convert a string of characters into a list of tokens\"\"\"\n return chars.replace('(', ' ( ').replace(')', ' ) ').split()\n\n\ndef parse(program: str) ->Exp:\n \"\"\"Read a scheme expression from a string\"\"\"\n return read_from_tokens(tokenize(program))\n\n\ndef read_from_tokens(tokens: list) ->Exp:\n \"\"\"Read an expression from a sequence of tokens\"\"\"\n if len(tokens) == 0:\n raise SyntaxError('unexpected EOF')\n token = tokens.pop(0)\n if token == '(':\n L = []\n while tokens[0] != ')':\n L.append(read_from_tokens(tokens))\n tokens.pop(0)\n return L\n elif token == ')':\n raise SyntaxError('unexpected )')\n else:\n return atom(token)\n\n\ndef atom(token: str) ->Atom:\n \"\"\"Numbers become numbers; every other token is a symbol\"\"\"\n try:\n return int(token)\n except ValueError:\n try:\n return float(token)\n except ValueError:\n return Symbol(token)\n\n\nprogram = '(begin (define r 10) (* pi (* r r)))'\nprint(eval(parse(program)))\n",
"step-5": "import math\nimport operator as op\n\nSymbol = str\nNumber = (int, float)\nAtom = (Symbol, Number)\nList = list\nExp = (Atom, List)\nEnv = dict\n\ndef standard_env() -> Env:\n \"An environment with some scheme standard procedures\"\n env = Env()\n env.update(vars(math)) # sin, cos, sqrt, pi ...\n env.update({\n '+':op.add, '-':op.sub, '*':op.mul, '/':op.truediv,\n '>':op.gt, '>':op.lt, '>=':op.ge, '<=':op.le, '=':op.eq,\n 'abs':abs,\n 'append':op.add,\n 'apply':lambda proc, args: proc(*args),\n 'begin':lambda *x: x[-1],\n 'car':lambda x: x[0],\n 'cdr':lambda x: x[1:],\n 'cons':lambda x,y: [x] + y,\n 'eq?':op.is_,\n 'expt':pow,\n 'equal?':op.eq,\n 'length':len,\n 'list':lambda *x: List(x),\n 'list?':lambda x: isinstance(x, List),\n 'map':map,\n 'max':max,\n 'min':min,\n 'not':op.not_,\n 'null?':lambda x: x == [],\n 'number?':lambda x: isinstance(x, Number),\n 'print':print,\n 'procedure?':callable,\n 'round':round,\n 'symbol?':lambda x: isinstance(x, Symbol),\n })\n return env\n\nglobal_env = standard_env()\n\ndef eval(x: Exp, env=global_env) -> Exp:\n \"Evaluate an expression in an environment.\"\n if isinstance(x, Symbol): # variable reference\n return env[x]\n elif not isinstance(x, List): # constant number\n return x\n elif x[0] == 'if': # conditional\n (_, test, conseq, alt) = x\n exp = (conseq if eval(test, env) else alt)\n return eval(exp, env)\n elif x[0] == 'define': # definition\n (_, symbol, exp) = x\n env[symbol] = eval(exp, env)\n else: # procedure call\n proc = eval(x[0], env)\n args = [eval(arg, env) for arg in x[1:]]\n return proc(*args)\n\ndef tokenize(chars: str) -> list:\n \"convert a string of characters into a list of tokens\"\n return chars.replace('(', ' ( ').replace(')', ' ) ').split()\n\ndef parse(program: str) -> Exp:\n \"Read a scheme expression from a string\"\n return read_from_tokens(tokenize(program))\n\ndef read_from_tokens(tokens: list) -> Exp:\n \"Read an expression from a sequence of tokens\"\n if len(tokens) == 0:\n raise SyntaxError('unexpected EOF')\n token = tokens.pop(0)\n if token == '(':\n L = []\n while tokens[0] != ')':\n L.append(read_from_tokens(tokens))\n tokens.pop(0) # pop off ')'\n return L\n elif token == ')':\n raise SyntaxError('unexpected )')\n else:\n return atom(token)\n\ndef atom(token: str) -> Atom:\n \"Numbers become numbers; every other token is a symbol\"\n try: return int(token)\n except ValueError:\n try: return float(token)\n except ValueError:\n return Symbol(token)\n\nprogram = \"(begin (define r 10) (* pi (* r r)))\"\nprint(eval(parse(program)))\n",
"step-ids": [
5,
7,
8,
9,
10
]
}
|
[
5,
7,
8,
9,
10
] |
<|reserved_special_token_0|>
class Coin(object):
def __init__(self):
self.sideup = 'Heads'
def toss(self):
if random.randint(0, 1) == 0:
self.sideup = 'Heads'
else:
self.sideup = 'Tails'
def get_sideup(self):
return self.sideup
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Coin(object):
def __init__(self):
self.sideup = 'Heads'
def toss(self):
if random.randint(0, 1) == 0:
self.sideup = 'Heads'
else:
self.sideup = 'Tails'
def get_sideup(self):
return self.sideup
<|reserved_special_token_0|>
print(mycoin.sideup)
print(mycoin.get_sideup())
mycoin.toss()
print(mycoin.get_sideup())
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Coin(object):
def __init__(self):
self.sideup = 'Heads'
def toss(self):
if random.randint(0, 1) == 0:
self.sideup = 'Heads'
else:
self.sideup = 'Tails'
def get_sideup(self):
return self.sideup
mycoin = Coin()
print(mycoin.sideup)
print(mycoin.get_sideup())
mycoin.toss()
print(mycoin.get_sideup())
<|reserved_special_token_1|>
import random
class Coin(object):
def __init__(self):
self.sideup = 'Heads'
def toss(self):
if random.randint(0, 1) == 0:
self.sideup = 'Heads'
else:
self.sideup = 'Tails'
def get_sideup(self):
return self.sideup
mycoin = Coin()
print(mycoin.sideup)
print(mycoin.get_sideup())
mycoin.toss()
print(mycoin.get_sideup())
<|reserved_special_token_1|>
#-------------------------------------------------------------------------------
# Name: module1
# Purpose:
#
# Author: Nirvana
#
# Created: 07/06/2014
# Copyright: (c) Nirvana 2014
# Licence: <your licence>
#-------------------------------------------------------------------------------
import random
class Coin(object):
def __init__(self):
self.sideup = "Heads"
def toss(self):
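        # random.randint(0, 1) returns 0 or 1 with equal probability, so the toss is fair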
if random.randint(0,1)==0:
self.sideup = "Heads"
else:
self.sideup = "Tails"
def get_sideup(self):
return self.sideup
mycoin=Coin()
print (mycoin.sideup)
print (mycoin.get_sideup())
mycoin.toss()
print (mycoin.get_sideup())
|
flexible
|
{
"blob_id": "eb246beb05249f5dfde019b773698ba3bb1b1118",
"index": 544,
"step-1": "<mask token>\n\n\nclass Coin(object):\n\n def __init__(self):\n self.sideup = 'Heads'\n\n def toss(self):\n if random.randint(0, 1) == 0:\n self.sideup = 'Heads'\n else:\n self.sideup = 'Tails'\n\n def get_sideup(self):\n return self.sideup\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Coin(object):\n\n def __init__(self):\n self.sideup = 'Heads'\n\n def toss(self):\n if random.randint(0, 1) == 0:\n self.sideup = 'Heads'\n else:\n self.sideup = 'Tails'\n\n def get_sideup(self):\n return self.sideup\n\n\n<mask token>\nprint(mycoin.sideup)\nprint(mycoin.get_sideup())\nmycoin.toss()\nprint(mycoin.get_sideup())\n",
"step-3": "<mask token>\n\n\nclass Coin(object):\n\n def __init__(self):\n self.sideup = 'Heads'\n\n def toss(self):\n if random.randint(0, 1) == 0:\n self.sideup = 'Heads'\n else:\n self.sideup = 'Tails'\n\n def get_sideup(self):\n return self.sideup\n\n\nmycoin = Coin()\nprint(mycoin.sideup)\nprint(mycoin.get_sideup())\nmycoin.toss()\nprint(mycoin.get_sideup())\n",
"step-4": "import random\n\n\nclass Coin(object):\n\n def __init__(self):\n self.sideup = 'Heads'\n\n def toss(self):\n if random.randint(0, 1) == 0:\n self.sideup = 'Heads'\n else:\n self.sideup = 'Tails'\n\n def get_sideup(self):\n return self.sideup\n\n\nmycoin = Coin()\nprint(mycoin.sideup)\nprint(mycoin.get_sideup())\nmycoin.toss()\nprint(mycoin.get_sideup())\n",
"step-5": "#-------------------------------------------------------------------------------\n# Name: module1\n# Purpose:\n#\n# Author: Nirvana\n#\n# Created: 07/06/2014\n# Copyright: (c) Nirvana 2014\n# Licence: <your licence>\n#-------------------------------------------------------------------------------\n\nimport random\n\nclass Coin(object):\n def __init__(self):\n self.sideup = \"Heads\"\n\n def toss(self):\n if random.randint(0,1)==0:\n self.sideup = \"Heads\"\n else:\n self.sideup = \"Tails\"\n\n def get_sideup(self):\n return self.sideup\n\nmycoin=Coin()\nprint (mycoin.sideup)\nprint (mycoin.get_sideup())\nmycoin.toss()\nprint (mycoin.get_sideup())\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
import datetime
if __name__ == "__main__" :
keys = {'a','e','i', 'o', 'u', 'y'}
values = [1]
dictionnaire = {cle : list(values) for cle in keys}
print("dictionnaire : ", dictionnaire)
values.append(2)
#for cle in keys : dictionnaire.update({cle:values})
#dictionnaire.update({cle2 : list(values) for cle2 in keys})
#dictionnaire = {cle : list(values) for cle in keys}
#for cle in list(dictionnaire) : dictionnaire.update({cle:values})
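    #note: every key now references the SAME list object, so a later mutation of values shows up under every key at once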
for cle in dictionnaire.keys() : dictionnaire.update({cle:values})
print("dictionnaire : ", dictionnaire)
|
normal
|
{
"blob_id": "468c070aebff3124927c5595d68bb94321dd75e5",
"index": 4406,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n keys = {'a', 'e', 'i', 'o', 'u', 'y'}\n values = [1]\n dictionnaire = {cle: list(values) for cle in keys}\n print('dictionnaire : ', dictionnaire)\n values.append(2)\n for cle in dictionnaire.keys():\n dictionnaire.update({cle: values})\n print('dictionnaire : ', dictionnaire)\n",
"step-3": "import datetime\nif __name__ == '__main__':\n keys = {'a', 'e', 'i', 'o', 'u', 'y'}\n values = [1]\n dictionnaire = {cle: list(values) for cle in keys}\n print('dictionnaire : ', dictionnaire)\n values.append(2)\n for cle in dictionnaire.keys():\n dictionnaire.update({cle: values})\n print('dictionnaire : ', dictionnaire)\n",
"step-4": "import datetime\n\nif __name__ == \"__main__\" :\n\n keys = {'a','e','i', 'o', 'u', 'y'}\n values = [1]\n\n dictionnaire = {cle : list(values) for cle in keys}\n print(\"dictionnaire : \", dictionnaire)\n\n values.append(2)\n\n #for cle in keys : dictionnaire.update({cle:values})\n \n #dictionnaire.update({cle2 : list(values) for cle2 in keys})\n \n #dictionnaire = {cle : list(values) for cle in keys}\n \n #for cle in list(dictionnaire) : dictionnaire.update({cle:values})\n \n for cle in dictionnaire.keys() : dictionnaire.update({cle:values})\n\n print(\"dictionnaire : \", dictionnaire)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class CabbageController:
<|reserved_special_token_0|>
def service(self):
X = tf.placeholder(tf.float32, shape=[None, 4])
W = tf.Variable(tf.random_normal([4, 1]), name='weight')
b = tf.Variable(tf.random_normal([1]), name='bias')
saver = tf.train.Saver()
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
saver.restore(sess, 'cabbage/saved_model/saved.ckpt')
data = [[self._avg_temp, self._min_temp, self._max_temp, self.
_rain_fall]]
arr = np.array(data, dtype=np.float32)
dict = sess.run(tf.matmul(X, W) + b, {X: arr[0:4]})
return dict[0]
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CabbageController:
def __init__(self):
self._avg_temp = 1
self._min_temp = 2
self._max_temp = 3
self._rain_fall = 4
def service(self):
X = tf.placeholder(tf.float32, shape=[None, 4])
W = tf.Variable(tf.random_normal([4, 1]), name='weight')
b = tf.Variable(tf.random_normal([1]), name='bias')
saver = tf.train.Saver()
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
saver.restore(sess, 'cabbage/saved_model/saved.ckpt')
data = [[self._avg_temp, self._min_temp, self._max_temp, self.
_rain_fall]]
arr = np.array(data, dtype=np.float32)
dict = sess.run(tf.matmul(X, W) + b, {X: arr[0:4]})
return dict[0]
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CabbageController:
def __init__(self):
self._avg_temp = 1
self._min_temp = 2
self._max_temp = 3
self._rain_fall = 4
def service(self):
X = tf.placeholder(tf.float32, shape=[None, 4])
W = tf.Variable(tf.random_normal([4, 1]), name='weight')
b = tf.Variable(tf.random_normal([1]), name='bias')
saver = tf.train.Saver()
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
saver.restore(sess, 'cabbage/saved_model/saved.ckpt')
data = [[self._avg_temp, self._min_temp, self._max_temp, self.
_rain_fall]]
arr = np.array(data, dtype=np.float32)
dict = sess.run(tf.matmul(X, W) + b, {X: arr[0:4]})
return dict[0]
def exec(self, flag):
if flag == 'd':
url = (
'http://kind.krx.co.kr/disclosureSimpleSearch.do?method=disclosureSimpleSearchMain'
)
d = KrxCrawler(url)
d.scrap()
elif flag == 'e':
url = ''
e = sm('005930')
e.selWeb()
elif flag == 'f':
scat = st()
scat.test()
<|reserved_special_token_1|>
import tensorflow as tf
from model import CabbageModel
import numpy as np
from krx import KrxCrawler
from naver_stock import StockModel as sm
from scattertest import scattertest as st
class CabbageController:
def __init__(self):
self._avg_temp = 1
self._min_temp = 2
self._max_temp = 3
self._rain_fall = 4
def service(self):
X = tf.placeholder(tf.float32, shape=[None, 4])
W = tf.Variable(tf.random_normal([4, 1]), name='weight')
b = tf.Variable(tf.random_normal([1]), name='bias')
saver = tf.train.Saver()
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
saver.restore(sess, 'cabbage/saved_model/saved.ckpt')
data = [[self._avg_temp, self._min_temp, self._max_temp, self.
_rain_fall]]
arr = np.array(data, dtype=np.float32)
dict = sess.run(tf.matmul(X, W) + b, {X: arr[0:4]})
return dict[0]
def exec(self, flag):
if flag == 'd':
url = (
'http://kind.krx.co.kr/disclosureSimpleSearch.do?method=disclosureSimpleSearchMain'
)
d = KrxCrawler(url)
d.scrap()
elif flag == 'e':
url = ''
e = sm('005930')
e.selWeb()
elif flag == 'f':
scat = st()
scat.test()
<|reserved_special_token_1|>
import tensorflow as tf
from model import CabbageModel
import numpy as np
from krx import KrxCrawler
from naver_stock import StockModel as sm
from scattertest import scattertest as st
class CabbageController:
def __init__(self):
#def __init__(self, avg_temp, min_temp, max_temp, rain_fall):
#self._avg_temp = avg_temp
#self._min_temp = min_temp
#self._max_temp= max_temp
#self._rain_fall = rain_fall
self._avg_temp = 1
self._min_temp = 2
self._max_temp = 3
self._rain_fall = 4
def service(self):
        #None -> number of rows
        #4 -> number of columns
X = tf.placeholder(tf.float32, shape=[None,4])
        #Y is a future value, so it does not exist yet; the model predicts it
W = tf.Variable(tf.random_normal([4,1]), name='weight')
b = tf.Variable(tf.random_normal([1]), name='bias')
saver = tf.train.Saver()
        #TensorFlow session scope
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
saver.restore(sess, 'cabbage/saved_model/saved.ckpt')
            #matrix structure
data = [[self._avg_temp, self._min_temp, self._max_temp, self._rain_fall],]
arr = np.array(data, dtype = np.float32)
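            #arr has shape (1, 4); the row slice arr[0:4] is just the whole single-sample batch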
dict = sess.run(tf.matmul(X,W) +b,{X: arr[0:4]})
return dict[0]
def exec(self, flag):
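        #dispatch on a one-letter flag: 'd' runs the KRX disclosure crawler, 'e' the Naver stock scraper, 'f' the scatter-plot test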
if flag == 'd':
url = "http://kind.krx.co.kr/disclosureSimpleSearch.do?method=disclosureSimpleSearchMain"
d = KrxCrawler(url)
d.scrap()
elif flag == 'e':
url = ''
e = sm('005930')
e.selWeb()
#e.scrap()
elif flag == 'f':
scat = st()
scat.test()
|
flexible
|
{
"blob_id": "90a220775efcc8ff9e83f1a1f011f424ddc3476d",
"index": 4487,
"step-1": "<mask token>\n\n\nclass CabbageController:\n <mask token>\n\n def service(self):\n X = tf.placeholder(tf.float32, shape=[None, 4])\n W = tf.Variable(tf.random_normal([4, 1]), name='weight')\n b = tf.Variable(tf.random_normal([1]), name='bias')\n saver = tf.train.Saver()\n with tf.Session() as sess:\n sess.run(tf.global_variables_initializer())\n saver.restore(sess, 'cabbage/saved_model/saved.ckpt')\n data = [[self._avg_temp, self._min_temp, self._max_temp, self.\n _rain_fall]]\n arr = np.array(data, dtype=np.float32)\n dict = sess.run(tf.matmul(X, W) + b, {X: arr[0:4]})\n return dict[0]\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass CabbageController:\n\n def __init__(self):\n self._avg_temp = 1\n self._min_temp = 2\n self._max_temp = 3\n self._rain_fall = 4\n\n def service(self):\n X = tf.placeholder(tf.float32, shape=[None, 4])\n W = tf.Variable(tf.random_normal([4, 1]), name='weight')\n b = tf.Variable(tf.random_normal([1]), name='bias')\n saver = tf.train.Saver()\n with tf.Session() as sess:\n sess.run(tf.global_variables_initializer())\n saver.restore(sess, 'cabbage/saved_model/saved.ckpt')\n data = [[self._avg_temp, self._min_temp, self._max_temp, self.\n _rain_fall]]\n arr = np.array(data, dtype=np.float32)\n dict = sess.run(tf.matmul(X, W) + b, {X: arr[0:4]})\n return dict[0]\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass CabbageController:\n\n def __init__(self):\n self._avg_temp = 1\n self._min_temp = 2\n self._max_temp = 3\n self._rain_fall = 4\n\n def service(self):\n X = tf.placeholder(tf.float32, shape=[None, 4])\n W = tf.Variable(tf.random_normal([4, 1]), name='weight')\n b = tf.Variable(tf.random_normal([1]), name='bias')\n saver = tf.train.Saver()\n with tf.Session() as sess:\n sess.run(tf.global_variables_initializer())\n saver.restore(sess, 'cabbage/saved_model/saved.ckpt')\n data = [[self._avg_temp, self._min_temp, self._max_temp, self.\n _rain_fall]]\n arr = np.array(data, dtype=np.float32)\n dict = sess.run(tf.matmul(X, W) + b, {X: arr[0:4]})\n return dict[0]\n\n def exec(self, flag):\n if flag == 'd':\n url = (\n 'http://kind.krx.co.kr/disclosureSimpleSearch.do?method=disclosureSimpleSearchMain'\n )\n d = KrxCrawler(url)\n d.scrap()\n elif flag == 'e':\n url = ''\n e = sm('005930')\n e.selWeb()\n elif flag == 'f':\n scat = st()\n scat.test()\n",
"step-4": "import tensorflow as tf\nfrom model import CabbageModel\nimport numpy as np\nfrom krx import KrxCrawler\nfrom naver_stock import StockModel as sm\nfrom scattertest import scattertest as st\n\n\nclass CabbageController:\n\n def __init__(self):\n self._avg_temp = 1\n self._min_temp = 2\n self._max_temp = 3\n self._rain_fall = 4\n\n def service(self):\n X = tf.placeholder(tf.float32, shape=[None, 4])\n W = tf.Variable(tf.random_normal([4, 1]), name='weight')\n b = tf.Variable(tf.random_normal([1]), name='bias')\n saver = tf.train.Saver()\n with tf.Session() as sess:\n sess.run(tf.global_variables_initializer())\n saver.restore(sess, 'cabbage/saved_model/saved.ckpt')\n data = [[self._avg_temp, self._min_temp, self._max_temp, self.\n _rain_fall]]\n arr = np.array(data, dtype=np.float32)\n dict = sess.run(tf.matmul(X, W) + b, {X: arr[0:4]})\n return dict[0]\n\n def exec(self, flag):\n if flag == 'd':\n url = (\n 'http://kind.krx.co.kr/disclosureSimpleSearch.do?method=disclosureSimpleSearchMain'\n )\n d = KrxCrawler(url)\n d.scrap()\n elif flag == 'e':\n url = ''\n e = sm('005930')\n e.selWeb()\n elif flag == 'f':\n scat = st()\n scat.test()\n",
"step-5": "import tensorflow as tf\nfrom model import CabbageModel\nimport numpy as np\nfrom krx import KrxCrawler\nfrom naver_stock import StockModel as sm\nfrom scattertest import scattertest as st\n\nclass CabbageController:\n def __init__(self):\n #def __init__(self, avg_temp, min_temp, max_temp, rain_fall):\n #self._avg_temp = avg_temp\n #self._min_temp = min_temp\n #self._max_temp= max_temp\n #self._rain_fall = rain_fall\n self._avg_temp = 1\n self._min_temp = 2\n self._max_temp = 3\n self._rain_fall = 4\n\n def service(self):\n #NONE -> 행 값\n #4 -> 열 값\n X = tf.placeholder(tf.float32, shape=[None,4])\n #Y는 미래의 값이기 때문에 현재 존재할 수 가없다. Y의 값을 예측하는 것\n W = tf.Variable(tf.random_normal([4,1]), name='weight')\n b = tf.Variable(tf.random_normal([1]), name='bias')\n saver = tf.train.Saver()\n #텐서 세션의 영역\n with tf.Session() as sess:\n sess.run(tf.global_variables_initializer())\n saver.restore(sess, 'cabbage/saved_model/saved.ckpt')\n #매트릭스 구조\n data = [[self._avg_temp, self._min_temp, self._max_temp, self._rain_fall],]\n arr = np.array(data, dtype = np.float32)\n dict = sess.run(tf.matmul(X,W) +b,{X: arr[0:4]})\n return dict[0]\n\n def exec(self, flag):\n if flag == 'd':\n url = \"http://kind.krx.co.kr/disclosureSimpleSearch.do?method=disclosureSimpleSearchMain\"\n d = KrxCrawler(url)\n d.scrap()\n elif flag == 'e':\n url = ''\n e = sm('005930')\n e.selWeb()\n #e.scrap()\n elif flag == 'f':\n scat = st()\n scat.test()",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from glob import glob
from PIL import Image
import numpy as np
from tqdm import tqdm
import cv2
import os
import matplotlib.pyplot as plt
np.set_printoptions(precision=3, suppress=True)
def get_index(path):
"""
get the length of index for voc2012 dataset.
path: the index of train,val or test path
"""
with open(path,'r') as f:
zz = f.readlines()
return [index.split("\n")[0] for index in zz]
def show_examples(images_base, labels_base, index_list, output_path):
results= []
for index in tqdm(index_list):
img = cv2.imread(os.path.join(images_base, index+".jpg"))
# lab = cv2.imread(os.path.join(labels_base, index+".png"), 0)
lab = np.array(Image.open(os.path.join(labels_base, index+".png")).convert('P'))
results+= np.unique(lab).tolist()
#
# plt.figure(figsize=(4,2))
# plt.subplot(121)
# plt.imshow(img)
# plt.title("images")
# plt.subplot(122)
# plt.imshow(lab)
# plt.title('label')
# plt.tight_layout()
# plt.savefig("%s/visual_%s.png"%(output_path, index), dpi=300)
# plt.show()
return list(set(results))
def get_info(label_dir):
label_path = glob("%s/*" % label_dir)
total_area = []
total_number = []
for label_name in tqdm(label_path):
lab = np.array(Image.open(label_name).convert('P'))
# print(lab.shape)
masks = [(lab == v) for v in range(21)]
# get each class area of images
zz = np.mean(masks, axis =(1, 2))
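        # the mean of a boolean mask is the fraction of pixels belonging to that class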
total_area.append(zz.copy())
# get exist class of images
zz[zz > 0] = 1
total_number.append(zz)
print(np.sum(total_number, axis=0))
print(np.sum(total_area, axis=0))
if __name__=="__main__":
import shutil
output_dir = "visual_results"
if os.path.exists(output_dir):
shutil.rmtree(output_dir)
os.makedirs(output_dir)
index_dir = '/data/VOCdevkit/VOC2012/ImageSets/Segmentation'
imge_dir = "/data/VOCdevkit/VOC2012/JPEGImages"
label_dir = "/data/VOCdevkit/VOC2012/SegmentationClass"
print("train_index:", len(get_index( os.path.join(index_dir, "train.txt") ) ) ) # 1464
print("val_index:", len( get_index( os.path.join(index_dir, "val.txt") ) ) ) # 1449
print("test_index:", len( get_index( os.path.join(index_dir, "test.txt") ) ) ) #1456
train_results= show_examples(imge_dir, label_dir, get_index(os.path.join(index_dir, "train.txt")), output_dir)
train_results.sort()
print("train label:", len(train_results), train_results)
get_info(label_dir)
"""
train label: 20 [0, 14, 19, 33, 37, 38, 52, 57, 72, 75, 89, 94, 108, 112, 113, 128, 132, 147, 150, 220]
number of each class:
[2903. 178. 144. 208. 150. 183. 152. 255. 250. 271. 135. 157. 249. 147. 157. 888. 167. 120. 183. 167. 157.]
are of each class:
[2019.413 21.703 8.608 23.93 16.14 19.298 49.044 40.491
68.606 27.83 28.275 33.941 51.712 27.909 30.196 139.84
16.282 22.923 39.572 44.975 22.053]
"""
|
normal
|
{
"blob_id": "b1b478965ad939a98478b19b4a94f3250167e25a",
"index": 2189,
"step-1": "<mask token>\n\n\ndef show_examples(images_base, labels_base, index_list, output_path):\n results = []\n for index in tqdm(index_list):\n img = cv2.imread(os.path.join(images_base, index + '.jpg'))\n lab = np.array(Image.open(os.path.join(labels_base, index + '.png')\n ).convert('P'))\n results += np.unique(lab).tolist()\n return list(set(results))\n\n\ndef get_info(label_dir):\n label_path = glob('%s/*' % label_dir)\n total_area = []\n total_number = []\n for label_name in tqdm(label_path):\n lab = np.array(Image.open(label_name).convert('P'))\n masks = [(lab == v) for v in range(21)]\n zz = np.mean(masks, axis=(1, 2))\n total_area.append(zz.copy())\n zz[zz > 0] = 1\n total_number.append(zz)\n print(np.sum(total_number, axis=0))\n print(np.sum(total_area, axis=0))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_index(path):\n \"\"\"\n get the length of index for voc2012 dataset.\n path: the index of train,val or test path\n \"\"\"\n with open(path, 'r') as f:\n zz = f.readlines()\n return [index.split('\\n')[0] for index in zz]\n\n\ndef show_examples(images_base, labels_base, index_list, output_path):\n results = []\n for index in tqdm(index_list):\n img = cv2.imread(os.path.join(images_base, index + '.jpg'))\n lab = np.array(Image.open(os.path.join(labels_base, index + '.png')\n ).convert('P'))\n results += np.unique(lab).tolist()\n return list(set(results))\n\n\ndef get_info(label_dir):\n label_path = glob('%s/*' % label_dir)\n total_area = []\n total_number = []\n for label_name in tqdm(label_path):\n lab = np.array(Image.open(label_name).convert('P'))\n masks = [(lab == v) for v in range(21)]\n zz = np.mean(masks, axis=(1, 2))\n total_area.append(zz.copy())\n zz[zz > 0] = 1\n total_number.append(zz)\n print(np.sum(total_number, axis=0))\n print(np.sum(total_area, axis=0))\n\n\n<mask token>\n",
"step-3": "<mask token>\nnp.set_printoptions(precision=3, suppress=True)\n\n\ndef get_index(path):\n \"\"\"\n get the length of index for voc2012 dataset.\n path: the index of train,val or test path\n \"\"\"\n with open(path, 'r') as f:\n zz = f.readlines()\n return [index.split('\\n')[0] for index in zz]\n\n\ndef show_examples(images_base, labels_base, index_list, output_path):\n results = []\n for index in tqdm(index_list):\n img = cv2.imread(os.path.join(images_base, index + '.jpg'))\n lab = np.array(Image.open(os.path.join(labels_base, index + '.png')\n ).convert('P'))\n results += np.unique(lab).tolist()\n return list(set(results))\n\n\ndef get_info(label_dir):\n label_path = glob('%s/*' % label_dir)\n total_area = []\n total_number = []\n for label_name in tqdm(label_path):\n lab = np.array(Image.open(label_name).convert('P'))\n masks = [(lab == v) for v in range(21)]\n zz = np.mean(masks, axis=(1, 2))\n total_area.append(zz.copy())\n zz[zz > 0] = 1\n total_number.append(zz)\n print(np.sum(total_number, axis=0))\n print(np.sum(total_area, axis=0))\n\n\nif __name__ == '__main__':\n import shutil\n output_dir = 'visual_results'\n if os.path.exists(output_dir):\n shutil.rmtree(output_dir)\n os.makedirs(output_dir)\n index_dir = '/data/VOCdevkit/VOC2012/ImageSets/Segmentation'\n imge_dir = '/data/VOCdevkit/VOC2012/JPEGImages'\n label_dir = '/data/VOCdevkit/VOC2012/SegmentationClass'\n print('train_index:', len(get_index(os.path.join(index_dir, 'train.txt'))))\n print('val_index:', len(get_index(os.path.join(index_dir, 'val.txt'))))\n print('test_index:', len(get_index(os.path.join(index_dir, 'test.txt'))))\n train_results = show_examples(imge_dir, label_dir, get_index(os.path.\n join(index_dir, 'train.txt')), output_dir)\n train_results.sort()\n print('train label:', len(train_results), train_results)\n get_info(label_dir)\n<mask token>\n",
"step-4": "from glob import glob\nfrom PIL import Image\nimport numpy as np\nfrom tqdm import tqdm\nimport cv2\nimport os\nimport matplotlib.pyplot as plt\nnp.set_printoptions(precision=3, suppress=True)\n\n\ndef get_index(path):\n \"\"\"\n get the length of index for voc2012 dataset.\n path: the index of train,val or test path\n \"\"\"\n with open(path, 'r') as f:\n zz = f.readlines()\n return [index.split('\\n')[0] for index in zz]\n\n\ndef show_examples(images_base, labels_base, index_list, output_path):\n results = []\n for index in tqdm(index_list):\n img = cv2.imread(os.path.join(images_base, index + '.jpg'))\n lab = np.array(Image.open(os.path.join(labels_base, index + '.png')\n ).convert('P'))\n results += np.unique(lab).tolist()\n return list(set(results))\n\n\ndef get_info(label_dir):\n label_path = glob('%s/*' % label_dir)\n total_area = []\n total_number = []\n for label_name in tqdm(label_path):\n lab = np.array(Image.open(label_name).convert('P'))\n masks = [(lab == v) for v in range(21)]\n zz = np.mean(masks, axis=(1, 2))\n total_area.append(zz.copy())\n zz[zz > 0] = 1\n total_number.append(zz)\n print(np.sum(total_number, axis=0))\n print(np.sum(total_area, axis=0))\n\n\nif __name__ == '__main__':\n import shutil\n output_dir = 'visual_results'\n if os.path.exists(output_dir):\n shutil.rmtree(output_dir)\n os.makedirs(output_dir)\n index_dir = '/data/VOCdevkit/VOC2012/ImageSets/Segmentation'\n imge_dir = '/data/VOCdevkit/VOC2012/JPEGImages'\n label_dir = '/data/VOCdevkit/VOC2012/SegmentationClass'\n print('train_index:', len(get_index(os.path.join(index_dir, 'train.txt'))))\n print('val_index:', len(get_index(os.path.join(index_dir, 'val.txt'))))\n print('test_index:', len(get_index(os.path.join(index_dir, 'test.txt'))))\n train_results = show_examples(imge_dir, label_dir, get_index(os.path.\n join(index_dir, 'train.txt')), output_dir)\n train_results.sort()\n print('train label:', len(train_results), train_results)\n get_info(label_dir)\n<mask token>\n",
"step-5": "from glob import glob\nfrom PIL import Image\nimport numpy as np\nfrom tqdm import tqdm\nimport cv2\nimport os\nimport matplotlib.pyplot as plt\n\nnp.set_printoptions(precision=3, suppress=True)\n\n\ndef get_index(path):\n \"\"\"\n get the length of index for voc2012 dataset.\n path: the index of train,val or test path\n \"\"\"\n with open(path,'r') as f:\n zz = f.readlines()\n return [index.split(\"\\n\")[0] for index in zz]\n\n\ndef show_examples(images_base, labels_base, index_list, output_path):\n results= []\n for index in tqdm(index_list):\n img = cv2.imread(os.path.join(images_base, index+\".jpg\"))\n # lab = cv2.imread(os.path.join(labels_base, index+\".png\"), 0)\n lab = np.array(Image.open(os.path.join(labels_base, index+\".png\")).convert('P'))\n results+= np.unique(lab).tolist()\n #\n # plt.figure(figsize=(4,2))\n # plt.subplot(121)\n # plt.imshow(img)\n # plt.title(\"images\")\n # plt.subplot(122)\n # plt.imshow(lab)\n # plt.title('label')\n # plt.tight_layout()\n # plt.savefig(\"%s/visual_%s.png\"%(output_path, index), dpi=300)\n # plt.show()\n\n return list(set(results))\n\n\ndef get_info(label_dir):\n label_path = glob(\"%s/*\" % label_dir)\n total_area = []\n total_number = []\n\n for label_name in tqdm(label_path):\n lab = np.array(Image.open(label_name).convert('P'))\n # print(lab.shape)\n masks = [(lab == v) for v in range(21)]\n # get each class area of images\n zz = np.mean(masks, axis =(1, 2))\n total_area.append(zz.copy())\n # get exist class of images\n zz[zz > 0] = 1\n total_number.append(zz)\n\n print(np.sum(total_number, axis=0))\n print(np.sum(total_area, axis=0))\n\n\nif __name__==\"__main__\":\n\n import shutil\n output_dir = \"visual_results\"\n if os.path.exists(output_dir):\n shutil.rmtree(output_dir)\n os.makedirs(output_dir)\n\n index_dir = '/data/VOCdevkit/VOC2012/ImageSets/Segmentation'\n imge_dir = \"/data/VOCdevkit/VOC2012/JPEGImages\"\n label_dir = \"/data/VOCdevkit/VOC2012/SegmentationClass\"\n print(\"train_index:\", len(get_index( os.path.join(index_dir, \"train.txt\") ) ) ) # 1464\n print(\"val_index:\", len( get_index( os.path.join(index_dir, \"val.txt\") ) ) ) # 1449\n print(\"test_index:\", len( get_index( os.path.join(index_dir, \"test.txt\") ) ) ) #1456\n\n train_results= show_examples(imge_dir, label_dir, get_index(os.path.join(index_dir, \"train.txt\")), output_dir)\n train_results.sort()\n print(\"train label:\", len(train_results), train_results)\n get_info(label_dir)\n\n\n\"\"\"\ntrain label: 20 [0, 14, 19, 33, 37, 38, 52, 57, 72, 75, 89, 94, 108, 112, 113, 128, 132, 147, 150, 220]\n\nnumber of each class:\n[2903. 178. 144. 208. 150. 183. 152. 255. 250. 271. 135. 157. 249. 147. 157. 888. 167. 120. 183. 167. 157.]\n\nare of each class:\n[2019.413 21.703 8.608 23.93 16.14 19.298 49.044 40.491\n 68.606 27.83 28.275 33.941 51.712 27.909 30.196 139.84\n 16.282 22.923 39.572 44.975 22.053]\n\"\"\"",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
def blackbox(name, backend, targets, params, target='target', path='/probe',
labels=None):
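    # Build a scrape config that rewrites each target into a probe parameter
    # and points the scrape itself at the given blackbox-style backend.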
labels = {} if labels is None else labels
banned_oses = ['debian']
filtered_targets = [x for x in targets if lib.get_os(x) not in banned_oses]
return {'job_name': name, 'metrics_path': path, 'params': params,
'static_configs': [{'targets': sorted(filtered_targets), 'labels':
labels}], 'relabel_configs': [{'source_labels': ['__address__'],
'regex': '(.*)(:80)?', 'target_label': '__param_%s' % target,
'replacement': '${1}'}, {'source_labels': ['__param_%s' % target],
'regex': '(.*)', 'target_label': 'instance', 'replacement': '${1}'},
{'source_labels': [], 'regex': '.*', 'target_label': '__address__',
'replacement': backend}]}
def generate_backend(host, local_services):
scrape_configs = []
scrape_configs.extend(local_services)
domain = lib.get_domain(host)
basic_auth = lib.read_secret('services/monitoring:login')
manifest = yaml.load(file(MANIFEST_PATH).read())
for package, spec in manifest['packages'].iteritems():
if spec is None or 'monitor' not in spec:
continue
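        # A bare URL string is normalized to {None: url} so single- and multi-URL specs share one code path.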
urls = spec['monitor']['url'] if isinstance(spec['monitor']['url'],
dict) else {None: spec['monitor']['url']}
for url_id, url_str in urls.iteritems():
url = urlparse.urlparse(url_str)
targets = []
for target in sorted(lib.get_nodes_with_package(package, domain
).keys()):
targets.append(target if url.port is None else '%s:%d' % (
target, url.port))
scrape_config = {'job_name': package + ('-%s' % url_id if
url_id else ''), 'metrics_path': url.path, 'scheme': url.
scheme, 'static_configs': [{'targets': sorted(targets)}]}
if 'interval' in spec['monitor']:
scrape_config['scrape_interval'] = spec['monitor']['interval']
if 'labels' in spec['monitor']:
scrape_config['static_configs'][0]['labels'] = spec['monitor'][
'labels']
if spec['monitor'].get('auth', False) and url.scheme == 'https':
scrape_config['basic_auth'] = basic_auth
scrape_configs.append(scrape_config)
layers = lib.get_layers(domain)
snmp_nodes = {}
ssh_nodes = {}
for layer in layers:
hosts = lib.get_nodes_with_layer(layer, domain)
snmp_mute = lib.get_nodes_with_layer(layer, domain, 'no-snmp')
ssh_mute = lib.get_nodes_with_layer(layer, domain, 'no-ssh')
snmp_nodes[layer] = list(set(hosts) - set(snmp_mute))
ssh_nodes[layer] = [(x + ':22') for x in set(hosts) - set(ssh_mute)]
for layer in layers:
if layer == 'access':
snmp_host = 'snmp2.event.dreamhack.se'
else:
snmp_host = 'snmp1.event.dreamhack.se'
snmp = blackbox('snmp_%s' % layer, snmp_host, snmp_nodes[layer], {
'layer': [layer]}, labels={'layer': layer})
snmp['scrape_interval'] = '30s'
snmp['scrape_timeout'] = '30s'
scrape_configs.append(snmp)
for layer in layers:
for ssh_host in ['jumpgate1', 'jumpgate2', 'rancid']:
fqdn = ssh_host + '.event.dreamhack.se:9115'
ssh = blackbox('ssh_%s_%s' % (layer, ssh_host), fqdn, ssh_nodes
[layer], {'module': ['ssh_banner']}, labels={'layer': layer})
ssh['scrape_interval'] = '30s'
ssh['scrape_timeout'] = '30s'
scrape_configs.append(ssh)
external = {'job_name': 'external', 'file_sd_configs': [{'files': [
'/etc/prometheus/external/*.yaml']}]}
scrape_configs.append(external)
if host.endswith('.event.dreamhack.se'):
puppet = {'job_name': 'puppet_runs', 'metrics_path': '/metrics',
'scrape_interval': '60s', 'scrape_timeout': '55s',
'static_configs': [{'targets': ['puppet.tech.dreamhack.se:9100']}]}
scrape_configs.append(puppet)
vcenter = {'job_name': 'vmware_vcenter', 'metrics_path': '/metrics',
'scrape_interval': '60s', 'scrape_timeout': '55s', 'static_configs':
[{'targets': ['provision.event.dreamhack.se:9272']}]}
scrape_configs.append(vcenter)
relabel = {'regex': ':?([^:]*):?.*', 'separator': ':', 'replacement':
'${1}', 'source_labels': ['host', 'instance'], 'target_label': 'host'}
mrc = 'metric_relabel_configs'
for scrape in scrape_configs:
if mrc in scrape:
scrape[mrc].append(relabel)
else:
scrape[mrc] = [relabel]
return {'scrape_configs': scrape_configs}
<|reserved_special_token_0|>
def generate(host, *args):
info = {}
local_targets = []
local_targets.append({'job_name': 'prometheus', 'scheme': 'http',
'static_configs': [{'targets': ['localhost:9090']}]})
info['prometheus'] = generate_backend(host, local_targets)
info['prometheus']['current_event'] = lib.get_current_event()
return info
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def blackbox(name, backend, targets, params, target='target', path='/probe',
labels=None):
labels = {} if labels is None else labels
banned_oses = ['debian']
filtered_targets = [x for x in targets if lib.get_os(x) not in banned_oses]
return {'job_name': name, 'metrics_path': path, 'params': params,
'static_configs': [{'targets': sorted(filtered_targets), 'labels':
labels}], 'relabel_configs': [{'source_labels': ['__address__'],
'regex': '(.*)(:80)?', 'target_label': '__param_%s' % target,
'replacement': '${1}'}, {'source_labels': ['__param_%s' % target],
'regex': '(.*)', 'target_label': 'instance', 'replacement': '${1}'},
{'source_labels': [], 'regex': '.*', 'target_label': '__address__',
'replacement': backend}]}
def generate_backend(host, local_services):
scrape_configs = []
scrape_configs.extend(local_services)
domain = lib.get_domain(host)
basic_auth = lib.read_secret('services/monitoring:login')
manifest = yaml.load(file(MANIFEST_PATH).read())
for package, spec in manifest['packages'].iteritems():
if spec is None or 'monitor' not in spec:
continue
urls = spec['monitor']['url'] if isinstance(spec['monitor']['url'],
dict) else {None: spec['monitor']['url']}
for url_id, url_str in urls.iteritems():
url = urlparse.urlparse(url_str)
targets = []
for target in sorted(lib.get_nodes_with_package(package, domain
).keys()):
targets.append(target if url.port is None else '%s:%d' % (
target, url.port))
scrape_config = {'job_name': package + ('-%s' % url_id if
url_id else ''), 'metrics_path': url.path, 'scheme': url.
scheme, 'static_configs': [{'targets': sorted(targets)}]}
if 'interval' in spec['monitor']:
scrape_config['scrape_interval'] = spec['monitor']['interval']
if 'labels' in spec['monitor']:
scrape_config['static_configs'][0]['labels'] = spec['monitor'][
'labels']
if spec['monitor'].get('auth', False) and url.scheme == 'https':
scrape_config['basic_auth'] = basic_auth
scrape_configs.append(scrape_config)
layers = lib.get_layers(domain)
snmp_nodes = {}
ssh_nodes = {}
for layer in layers:
hosts = lib.get_nodes_with_layer(layer, domain)
snmp_mute = lib.get_nodes_with_layer(layer, domain, 'no-snmp')
ssh_mute = lib.get_nodes_with_layer(layer, domain, 'no-ssh')
snmp_nodes[layer] = list(set(hosts) - set(snmp_mute))
ssh_nodes[layer] = [(x + ':22') for x in set(hosts) - set(ssh_mute)]
for layer in layers:
if layer == 'access':
snmp_host = 'snmp2.event.dreamhack.se'
else:
snmp_host = 'snmp1.event.dreamhack.se'
snmp = blackbox('snmp_%s' % layer, snmp_host, snmp_nodes[layer], {
'layer': [layer]}, labels={'layer': layer})
snmp['scrape_interval'] = '30s'
snmp['scrape_timeout'] = '30s'
scrape_configs.append(snmp)
for layer in layers:
for ssh_host in ['jumpgate1', 'jumpgate2', 'rancid']:
fqdn = ssh_host + '.event.dreamhack.se:9115'
ssh = blackbox('ssh_%s_%s' % (layer, ssh_host), fqdn, ssh_nodes
[layer], {'module': ['ssh_banner']}, labels={'layer': layer})
ssh['scrape_interval'] = '30s'
ssh['scrape_timeout'] = '30s'
scrape_configs.append(ssh)
external = {'job_name': 'external', 'file_sd_configs': [{'files': [
'/etc/prometheus/external/*.yaml']}]}
scrape_configs.append(external)
if host.endswith('.event.dreamhack.se'):
puppet = {'job_name': 'puppet_runs', 'metrics_path': '/metrics',
'scrape_interval': '60s', 'scrape_timeout': '55s',
'static_configs': [{'targets': ['puppet.tech.dreamhack.se:9100']}]}
scrape_configs.append(puppet)
vcenter = {'job_name': 'vmware_vcenter', 'metrics_path': '/metrics',
'scrape_interval': '60s', 'scrape_timeout': '55s', 'static_configs':
[{'targets': ['provision.event.dreamhack.se:9272']}]}
scrape_configs.append(vcenter)
relabel = {'regex': ':?([^:]*):?.*', 'separator': ':', 'replacement':
'${1}', 'source_labels': ['host', 'instance'], 'target_label': 'host'}
mrc = 'metric_relabel_configs'
for scrape in scrape_configs:
if mrc in scrape:
scrape[mrc].append(relabel)
else:
scrape[mrc] = [relabel]
return {'scrape_configs': scrape_configs}
def requires(host, *args):
return ['apache(ldap)']
def generate(host, *args):
info = {}
local_targets = []
local_targets.append({'job_name': 'prometheus', 'scheme': 'http',
'static_configs': [{'targets': ['localhost:9090']}]})
info['prometheus'] = generate_backend(host, local_targets)
info['prometheus']['current_event'] = lib.get_current_event()
return info
<|reserved_special_token_1|>
<|reserved_special_token_0|>
MANIFEST_PATH = '/etc/manifest'
HTTP_BASIC_AUTH = None
def blackbox(name, backend, targets, params, target='target', path='/probe',
labels=None):
labels = {} if labels is None else labels
banned_oses = ['debian']
filtered_targets = [x for x in targets if lib.get_os(x) not in banned_oses]
return {'job_name': name, 'metrics_path': path, 'params': params,
'static_configs': [{'targets': sorted(filtered_targets), 'labels':
labels}], 'relabel_configs': [{'source_labels': ['__address__'],
'regex': '(.*)(:80)?', 'target_label': '__param_%s' % target,
'replacement': '${1}'}, {'source_labels': ['__param_%s' % target],
'regex': '(.*)', 'target_label': 'instance', 'replacement': '${1}'},
{'source_labels': [], 'regex': '.*', 'target_label': '__address__',
'replacement': backend}]}
def generate_backend(host, local_services):
scrape_configs = []
scrape_configs.extend(local_services)
domain = lib.get_domain(host)
basic_auth = lib.read_secret('services/monitoring:login')
manifest = yaml.load(file(MANIFEST_PATH).read())
for package, spec in manifest['packages'].iteritems():
if spec is None or 'monitor' not in spec:
continue
urls = spec['monitor']['url'] if isinstance(spec['monitor']['url'],
dict) else {None: spec['monitor']['url']}
for url_id, url_str in urls.iteritems():
url = urlparse.urlparse(url_str)
targets = []
for target in sorted(lib.get_nodes_with_package(package, domain
).keys()):
targets.append(target if url.port is None else '%s:%d' % (
target, url.port))
scrape_config = {'job_name': package + ('-%s' % url_id if
url_id else ''), 'metrics_path': url.path, 'scheme': url.
scheme, 'static_configs': [{'targets': sorted(targets)}]}
if 'interval' in spec['monitor']:
scrape_config['scrape_interval'] = spec['monitor']['interval']
if 'labels' in spec['monitor']:
scrape_config['static_configs'][0]['labels'] = spec['monitor'][
'labels']
if spec['monitor'].get('auth', False) and url.scheme == 'https':
scrape_config['basic_auth'] = basic_auth
scrape_configs.append(scrape_config)
layers = lib.get_layers(domain)
snmp_nodes = {}
ssh_nodes = {}
for layer in layers:
hosts = lib.get_nodes_with_layer(layer, domain)
snmp_mute = lib.get_nodes_with_layer(layer, domain, 'no-snmp')
ssh_mute = lib.get_nodes_with_layer(layer, domain, 'no-ssh')
snmp_nodes[layer] = list(set(hosts) - set(snmp_mute))
ssh_nodes[layer] = [(x + ':22') for x in set(hosts) - set(ssh_mute)]
for layer in layers:
if layer == 'access':
snmp_host = 'snmp2.event.dreamhack.se'
else:
snmp_host = 'snmp1.event.dreamhack.se'
snmp = blackbox('snmp_%s' % layer, snmp_host, snmp_nodes[layer], {
'layer': [layer]}, labels={'layer': layer})
snmp['scrape_interval'] = '30s'
snmp['scrape_timeout'] = '30s'
scrape_configs.append(snmp)
for layer in layers:
for ssh_host in ['jumpgate1', 'jumpgate2', 'rancid']:
fqdn = ssh_host + '.event.dreamhack.se:9115'
ssh = blackbox('ssh_%s_%s' % (layer, ssh_host), fqdn, ssh_nodes
[layer], {'module': ['ssh_banner']}, labels={'layer': layer})
ssh['scrape_interval'] = '30s'
ssh['scrape_timeout'] = '30s'
scrape_configs.append(ssh)
external = {'job_name': 'external', 'file_sd_configs': [{'files': [
'/etc/prometheus/external/*.yaml']}]}
scrape_configs.append(external)
if host.endswith('.event.dreamhack.se'):
puppet = {'job_name': 'puppet_runs', 'metrics_path': '/metrics',
'scrape_interval': '60s', 'scrape_timeout': '55s',
'static_configs': [{'targets': ['puppet.tech.dreamhack.se:9100']}]}
scrape_configs.append(puppet)
vcenter = {'job_name': 'vmware_vcenter', 'metrics_path': '/metrics',
'scrape_interval': '60s', 'scrape_timeout': '55s', 'static_configs':
[{'targets': ['provision.event.dreamhack.se:9272']}]}
scrape_configs.append(vcenter)
relabel = {'regex': ':?([^:]*):?.*', 'separator': ':', 'replacement':
'${1}', 'source_labels': ['host', 'instance'], 'target_label': 'host'}
mrc = 'metric_relabel_configs'
for scrape in scrape_configs:
if mrc in scrape:
scrape[mrc].append(relabel)
else:
scrape[mrc] = [relabel]
return {'scrape_configs': scrape_configs}
def requires(host, *args):
return ['apache(ldap)']
def generate(host, *args):
info = {}
local_targets = []
local_targets.append({'job_name': 'prometheus', 'scheme': 'http',
'static_configs': [{'targets': ['localhost:9090']}]})
info['prometheus'] = generate_backend(host, local_targets)
info['prometheus']['current_event'] = lib.get_current_event()
return info
<|reserved_special_token_1|>
import lib
import urlparse
import yaml
MANIFEST_PATH = '/etc/manifest'
HTTP_BASIC_AUTH = None
def blackbox(name, backend, targets, params, target='target', path='/probe',
labels=None):
labels = {} if labels is None else labels
banned_oses = ['debian']
filtered_targets = [x for x in targets if lib.get_os(x) not in banned_oses]
return {'job_name': name, 'metrics_path': path, 'params': params,
'static_configs': [{'targets': sorted(filtered_targets), 'labels':
labels}], 'relabel_configs': [{'source_labels': ['__address__'],
'regex': '(.*)(:80)?', 'target_label': '__param_%s' % target,
'replacement': '${1}'}, {'source_labels': ['__param_%s' % target],
'regex': '(.*)', 'target_label': 'instance', 'replacement': '${1}'},
{'source_labels': [], 'regex': '.*', 'target_label': '__address__',
'replacement': backend}]}
def generate_backend(host, local_services):
scrape_configs = []
scrape_configs.extend(local_services)
domain = lib.get_domain(host)
basic_auth = lib.read_secret('services/monitoring:login')
manifest = yaml.load(file(MANIFEST_PATH).read())
for package, spec in manifest['packages'].iteritems():
if spec is None or 'monitor' not in spec:
continue
urls = spec['monitor']['url'] if isinstance(spec['monitor']['url'],
dict) else {None: spec['monitor']['url']}
for url_id, url_str in urls.iteritems():
url = urlparse.urlparse(url_str)
targets = []
for target in sorted(lib.get_nodes_with_package(package, domain
).keys()):
targets.append(target if url.port is None else '%s:%d' % (
target, url.port))
scrape_config = {'job_name': package + ('-%s' % url_id if
url_id else ''), 'metrics_path': url.path, 'scheme': url.
scheme, 'static_configs': [{'targets': sorted(targets)}]}
if 'interval' in spec['monitor']:
scrape_config['scrape_interval'] = spec['monitor']['interval']
if 'labels' in spec['monitor']:
scrape_config['static_configs'][0]['labels'] = spec['monitor'][
'labels']
if spec['monitor'].get('auth', False) and url.scheme == 'https':
scrape_config['basic_auth'] = basic_auth
scrape_configs.append(scrape_config)
layers = lib.get_layers(domain)
snmp_nodes = {}
ssh_nodes = {}
for layer in layers:
hosts = lib.get_nodes_with_layer(layer, domain)
snmp_mute = lib.get_nodes_with_layer(layer, domain, 'no-snmp')
ssh_mute = lib.get_nodes_with_layer(layer, domain, 'no-ssh')
snmp_nodes[layer] = list(set(hosts) - set(snmp_mute))
ssh_nodes[layer] = [(x + ':22') for x in set(hosts) - set(ssh_mute)]
for layer in layers:
if layer == 'access':
snmp_host = 'snmp2.event.dreamhack.se'
else:
snmp_host = 'snmp1.event.dreamhack.se'
snmp = blackbox('snmp_%s' % layer, snmp_host, snmp_nodes[layer], {
'layer': [layer]}, labels={'layer': layer})
snmp['scrape_interval'] = '30s'
snmp['scrape_timeout'] = '30s'
scrape_configs.append(snmp)
for layer in layers:
for ssh_host in ['jumpgate1', 'jumpgate2', 'rancid']:
fqdn = ssh_host + '.event.dreamhack.se:9115'
ssh = blackbox('ssh_%s_%s' % (layer, ssh_host), fqdn, ssh_nodes
[layer], {'module': ['ssh_banner']}, labels={'layer': layer})
ssh['scrape_interval'] = '30s'
ssh['scrape_timeout'] = '30s'
scrape_configs.append(ssh)
external = {'job_name': 'external', 'file_sd_configs': [{'files': [
'/etc/prometheus/external/*.yaml']}]}
scrape_configs.append(external)
if host.endswith('.event.dreamhack.se'):
puppet = {'job_name': 'puppet_runs', 'metrics_path': '/metrics',
'scrape_interval': '60s', 'scrape_timeout': '55s',
'static_configs': [{'targets': ['puppet.tech.dreamhack.se:9100']}]}
scrape_configs.append(puppet)
vcenter = {'job_name': 'vmware_vcenter', 'metrics_path': '/metrics',
'scrape_interval': '60s', 'scrape_timeout': '55s', 'static_configs':
[{'targets': ['provision.event.dreamhack.se:9272']}]}
scrape_configs.append(vcenter)
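    # Ensure every metric ends up with a host label: reuse an existing host
    # label if present, otherwise derive it from instance, stripping any port.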
relabel = {'regex': ':?([^:]*):?.*', 'separator': ':', 'replacement':
'${1}', 'source_labels': ['host', 'instance'], 'target_label': 'host'}
mrc = 'metric_relabel_configs'
for scrape in scrape_configs:
if mrc in scrape:
scrape[mrc].append(relabel)
else:
scrape[mrc] = [relabel]
return {'scrape_configs': scrape_configs}
def requires(host, *args):
return ['apache(ldap)']
def generate(host, *args):
info = {}
local_targets = []
local_targets.append({'job_name': 'prometheus', 'scheme': 'http',
'static_configs': [{'targets': ['localhost:9090']}]})
info['prometheus'] = generate_backend(host, local_targets)
info['prometheus']['current_event'] = lib.get_current_event()
return info
<|reserved_special_token_1|>
# Copyright 2018 dhtech
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file
import lib
import urlparse
import yaml
MANIFEST_PATH = '/etc/manifest'
HTTP_BASIC_AUTH = None
def blackbox(name, backend, targets, params,
target='target', path='/probe', labels=None):
labels = {} if labels is None else labels
# Strip banned OSes
banned_oses = ['debian']
filtered_targets = [x for x in targets if lib.get_os(x) not in banned_oses]
return {
'job_name': name,
'metrics_path': path,
'params': params,
'static_configs': [{
'targets': sorted(filtered_targets),
'labels': labels
}],
'relabel_configs': [{
'source_labels': ['__address__'],
'regex': '(.*)(:80)?',
'target_label': '__param_%s' % target,
'replacement': '${1}',
}, {
'source_labels': ['__param_%s' % target],
'regex': '(.*)',
'target_label': 'instance',
'replacement': '${1}',
}, {
'source_labels': [],
'regex': '.*',
'target_label': '__address__',
'replacement': backend,
}]
}
def generate_backend(host, local_services):
scrape_configs = []
scrape_configs.extend(local_services)
domain = lib.get_domain(host)
basic_auth = lib.read_secret('services/monitoring:login')
# Find services that wants to be monitored
manifest = yaml.load(file(MANIFEST_PATH).read())
for package, spec in manifest['packages'].iteritems():
if spec is None or 'monitor' not in spec:
continue
urls = (spec['monitor']['url']
if isinstance(spec['monitor']['url'], dict) else
{None: spec['monitor']['url']})
for url_id, url_str in urls.iteritems():
url = urlparse.urlparse(url_str)
targets = []
for target in sorted(
lib.get_nodes_with_package(package, domain).keys()):
targets.append(target if url.port is None else '%s:%d' % (
target, url.port))
scrape_config = {
'job_name': package + ('-%s' % url_id if url_id else ''),
'metrics_path': url.path,
'scheme': url.scheme,
'static_configs': [
{'targets': sorted(targets)}
],
}
if 'interval' in spec['monitor']:
scrape_config['scrape_interval'] = spec['monitor']['interval']
if 'labels' in spec['monitor']:
scrape_config['static_configs'][0]['labels'] = spec['monitor']['labels']
# Only allow authentication over https
if spec['monitor'].get('auth', False) and url.scheme == 'https':
scrape_config['basic_auth'] = basic_auth
scrape_configs.append(scrape_config)
# Layer specific monitoring
layers = lib.get_layers(domain)
snmp_nodes = {}
ssh_nodes = {}
for layer in layers:
hosts = lib.get_nodes_with_layer(layer, domain)
snmp_mute = lib.get_nodes_with_layer(layer, domain, 'no-snmp')
ssh_mute = lib.get_nodes_with_layer(layer, domain, 'no-ssh')
snmp_nodes[layer] = list(set(hosts) - set(snmp_mute))
ssh_nodes[layer] = [x+':22' for x in set(hosts) - set(ssh_mute)]
# SNMP
for layer in layers:
# TODO(bluecmd): Use options for this
if layer == 'access':
snmp_host = 'snmp2.event.dreamhack.se'
else:
snmp_host = 'snmp1.event.dreamhack.se'
snmp = blackbox(
'snmp_%s' % layer, snmp_host,
snmp_nodes[layer], {'layer': [layer]}, labels={
'layer': layer})
snmp['scrape_interval'] = '30s'
snmp['scrape_timeout'] = '30s'
scrape_configs.append(snmp)
# SSH
for layer in layers:
for ssh_host in ['jumpgate1', 'jumpgate2', 'rancid']:
fqdn = ssh_host + '.event.dreamhack.se:9115'
ssh = blackbox(
'ssh_%s_%s' % (layer, ssh_host), fqdn,
ssh_nodes[layer], {'module': ['ssh_banner']}, labels={'layer': layer})
ssh['scrape_interval'] = '30s'
ssh['scrape_timeout'] = '30s'
scrape_configs.append(ssh)
# Add external service-discovery
external = {
'job_name': 'external',
'file_sd_configs': [{
'files': ['/etc/prometheus/external/*.yaml'],
}],
}
scrape_configs.append(external)
if host.endswith('.event.dreamhack.se'):
# Event should scrape puppet.tech.dreamhack.se to get information about
# puppet runs
puppet = {
'job_name': 'puppet_runs',
'metrics_path': '/metrics',
'scrape_interval': '60s',
'scrape_timeout': '55s',
'static_configs': [{
'targets': ['puppet.tech.dreamhack.se:9100'],
}],
}
scrape_configs.append(puppet)
vcenter = {
'job_name': 'vmware_vcenter',
'metrics_path': '/metrics',
'scrape_interval': '60s',
'scrape_timeout': '55s',
'static_configs': [{
'targets': ['provision.event.dreamhack.se:9272'],
}],
}
scrape_configs.append(vcenter)
# Make sure that all metrics have a host label.
# This rule uses the existing host label if there is one,
# stripping of the port (which shouldn't be part of the host label anyway)
# *or* if that label does not exist it uses the instance label
# (again stripping of the port)
relabel = {
'regex': r':?([^:]*):?.*',
'separator': ':',
'replacement': '${1}',
'source_labels': ['host', 'instance'],
'target_label': 'host',
}
mrc = 'metric_relabel_configs'
for scrape in scrape_configs:
if mrc in scrape:
scrape[mrc].append(relabel)
else:
scrape[mrc] = [relabel]
return {'scrape_configs': scrape_configs}
def requires(host, *args):
return ['apache(ldap)']
def generate(host, *args):
info = {}
local_targets = []
local_targets.append({
'job_name': 'prometheus',
'scheme': 'http',
'static_configs': [{'targets': ['localhost:9090']}]})
info['prometheus'] = generate_backend(host, local_targets)
# Get current event
info['prometheus']['current_event'] = lib.get_current_event()
return info
# vim: ts=4: sts=4: sw=4: expandtab
|
flexible
|
{
"blob_id": "f489058c922d405754ad32a737f67bc03c08772b",
"index": 701,
"step-1": "<mask token>\n\n\ndef blackbox(name, backend, targets, params, target='target', path='/probe',\n labels=None):\n labels = {} if labels is None else labels\n banned_oses = ['debian']\n filtered_targets = [x for x in targets if lib.get_os(x) not in banned_oses]\n return {'job_name': name, 'metrics_path': path, 'params': params,\n 'static_configs': [{'targets': sorted(filtered_targets), 'labels':\n labels}], 'relabel_configs': [{'source_labels': ['__address__'],\n 'regex': '(.*)(:80)?', 'target_label': '__param_%s' % target,\n 'replacement': '${1}'}, {'source_labels': ['__param_%s' % target],\n 'regex': '(.*)', 'target_label': 'instance', 'replacement': '${1}'},\n {'source_labels': [], 'regex': '.*', 'target_label': '__address__',\n 'replacement': backend}]}\n\n\ndef generate_backend(host, local_services):\n scrape_configs = []\n scrape_configs.extend(local_services)\n domain = lib.get_domain(host)\n basic_auth = lib.read_secret('services/monitoring:login')\n manifest = yaml.load(file(MANIFEST_PATH).read())\n for package, spec in manifest['packages'].iteritems():\n if spec is None or 'monitor' not in spec:\n continue\n urls = spec['monitor']['url'] if isinstance(spec['monitor']['url'],\n dict) else {None: spec['monitor']['url']}\n for url_id, url_str in urls.iteritems():\n url = urlparse.urlparse(url_str)\n targets = []\n for target in sorted(lib.get_nodes_with_package(package, domain\n ).keys()):\n targets.append(target if url.port is None else '%s:%d' % (\n target, url.port))\n scrape_config = {'job_name': package + ('-%s' % url_id if\n url_id else ''), 'metrics_path': url.path, 'scheme': url.\n scheme, 'static_configs': [{'targets': sorted(targets)}]}\n if 'interval' in spec['monitor']:\n scrape_config['scrape_interval'] = spec['monitor']['interval']\n if 'labels' in spec['monitor']:\n scrape_config['static_configs'][0]['labels'] = spec['monitor'][\n 'labels']\n if spec['monitor'].get('auth', False) and url.scheme == 'https':\n scrape_config['basic_auth'] = basic_auth\n scrape_configs.append(scrape_config)\n layers = lib.get_layers(domain)\n snmp_nodes = {}\n ssh_nodes = {}\n for layer in layers:\n hosts = lib.get_nodes_with_layer(layer, domain)\n snmp_mute = lib.get_nodes_with_layer(layer, domain, 'no-snmp')\n ssh_mute = lib.get_nodes_with_layer(layer, domain, 'no-ssh')\n snmp_nodes[layer] = list(set(hosts) - set(snmp_mute))\n ssh_nodes[layer] = [(x + ':22') for x in set(hosts) - set(ssh_mute)]\n for layer in layers:\n if layer == 'access':\n snmp_host = 'snmp2.event.dreamhack.se'\n else:\n snmp_host = 'snmp1.event.dreamhack.se'\n snmp = blackbox('snmp_%s' % layer, snmp_host, snmp_nodes[layer], {\n 'layer': [layer]}, labels={'layer': layer})\n snmp['scrape_interval'] = '30s'\n snmp['scrape_timeout'] = '30s'\n scrape_configs.append(snmp)\n for layer in layers:\n for ssh_host in ['jumpgate1', 'jumpgate2', 'rancid']:\n fqdn = ssh_host + '.event.dreamhack.se:9115'\n ssh = blackbox('ssh_%s_%s' % (layer, ssh_host), fqdn, ssh_nodes\n [layer], {'module': ['ssh_banner']}, labels={'layer': layer})\n ssh['scrape_interval'] = '30s'\n ssh['scrape_timeout'] = '30s'\n scrape_configs.append(ssh)\n external = {'job_name': 'external', 'file_sd_configs': [{'files': [\n '/etc/prometheus/external/*.yaml']}]}\n scrape_configs.append(external)\n if host.endswith('.event.dreamhack.se'):\n puppet = {'job_name': 'puppet_runs', 'metrics_path': '/metrics',\n 'scrape_interval': '60s', 'scrape_timeout': '55s',\n 'static_configs': [{'targets': ['puppet.tech.dreamhack.se:9100']}]}\n 
scrape_configs.append(puppet)\n vcenter = {'job_name': 'vmware_vcenter', 'metrics_path': '/metrics',\n 'scrape_interval': '60s', 'scrape_timeout': '55s', 'static_configs':\n [{'targets': ['provision.event.dreamhack.se:9272']}]}\n scrape_configs.append(vcenter)\n relabel = {'regex': ':?([^:]*):?.*', 'separator': ':', 'replacement':\n '${1}', 'source_labels': ['host', 'instance'], 'target_label': 'host'}\n mrc = 'metric_relabel_configs'\n for scrape in scrape_configs:\n if mrc in scrape:\n scrape[mrc].append(relabel)\n else:\n scrape[mrc] = [relabel]\n return {'scrape_configs': scrape_configs}\n\n\n<mask token>\n\n\ndef generate(host, *args):\n info = {}\n local_targets = []\n local_targets.append({'job_name': 'prometheus', 'scheme': 'http',\n 'static_configs': [{'targets': ['localhost:9090']}]})\n info['prometheus'] = generate_backend(host, local_targets)\n info['prometheus']['current_event'] = lib.get_current_event()\n return info\n",
"step-2": "<mask token>\n\n\ndef blackbox(name, backend, targets, params, target='target', path='/probe',\n labels=None):\n labels = {} if labels is None else labels\n banned_oses = ['debian']\n filtered_targets = [x for x in targets if lib.get_os(x) not in banned_oses]\n return {'job_name': name, 'metrics_path': path, 'params': params,\n 'static_configs': [{'targets': sorted(filtered_targets), 'labels':\n labels}], 'relabel_configs': [{'source_labels': ['__address__'],\n 'regex': '(.*)(:80)?', 'target_label': '__param_%s' % target,\n 'replacement': '${1}'}, {'source_labels': ['__param_%s' % target],\n 'regex': '(.*)', 'target_label': 'instance', 'replacement': '${1}'},\n {'source_labels': [], 'regex': '.*', 'target_label': '__address__',\n 'replacement': backend}]}\n\n\ndef generate_backend(host, local_services):\n scrape_configs = []\n scrape_configs.extend(local_services)\n domain = lib.get_domain(host)\n basic_auth = lib.read_secret('services/monitoring:login')\n manifest = yaml.load(file(MANIFEST_PATH).read())\n for package, spec in manifest['packages'].iteritems():\n if spec is None or 'monitor' not in spec:\n continue\n urls = spec['monitor']['url'] if isinstance(spec['monitor']['url'],\n dict) else {None: spec['monitor']['url']}\n for url_id, url_str in urls.iteritems():\n url = urlparse.urlparse(url_str)\n targets = []\n for target in sorted(lib.get_nodes_with_package(package, domain\n ).keys()):\n targets.append(target if url.port is None else '%s:%d' % (\n target, url.port))\n scrape_config = {'job_name': package + ('-%s' % url_id if\n url_id else ''), 'metrics_path': url.path, 'scheme': url.\n scheme, 'static_configs': [{'targets': sorted(targets)}]}\n if 'interval' in spec['monitor']:\n scrape_config['scrape_interval'] = spec['monitor']['interval']\n if 'labels' in spec['monitor']:\n scrape_config['static_configs'][0]['labels'] = spec['monitor'][\n 'labels']\n if spec['monitor'].get('auth', False) and url.scheme == 'https':\n scrape_config['basic_auth'] = basic_auth\n scrape_configs.append(scrape_config)\n layers = lib.get_layers(domain)\n snmp_nodes = {}\n ssh_nodes = {}\n for layer in layers:\n hosts = lib.get_nodes_with_layer(layer, domain)\n snmp_mute = lib.get_nodes_with_layer(layer, domain, 'no-snmp')\n ssh_mute = lib.get_nodes_with_layer(layer, domain, 'no-ssh')\n snmp_nodes[layer] = list(set(hosts) - set(snmp_mute))\n ssh_nodes[layer] = [(x + ':22') for x in set(hosts) - set(ssh_mute)]\n for layer in layers:\n if layer == 'access':\n snmp_host = 'snmp2.event.dreamhack.se'\n else:\n snmp_host = 'snmp1.event.dreamhack.se'\n snmp = blackbox('snmp_%s' % layer, snmp_host, snmp_nodes[layer], {\n 'layer': [layer]}, labels={'layer': layer})\n snmp['scrape_interval'] = '30s'\n snmp['scrape_timeout'] = '30s'\n scrape_configs.append(snmp)\n for layer in layers:\n for ssh_host in ['jumpgate1', 'jumpgate2', 'rancid']:\n fqdn = ssh_host + '.event.dreamhack.se:9115'\n ssh = blackbox('ssh_%s_%s' % (layer, ssh_host), fqdn, ssh_nodes\n [layer], {'module': ['ssh_banner']}, labels={'layer': layer})\n ssh['scrape_interval'] = '30s'\n ssh['scrape_timeout'] = '30s'\n scrape_configs.append(ssh)\n external = {'job_name': 'external', 'file_sd_configs': [{'files': [\n '/etc/prometheus/external/*.yaml']}]}\n scrape_configs.append(external)\n if host.endswith('.event.dreamhack.se'):\n puppet = {'job_name': 'puppet_runs', 'metrics_path': '/metrics',\n 'scrape_interval': '60s', 'scrape_timeout': '55s',\n 'static_configs': [{'targets': ['puppet.tech.dreamhack.se:9100']}]}\n 
scrape_configs.append(puppet)\n vcenter = {'job_name': 'vmware_vcenter', 'metrics_path': '/metrics',\n 'scrape_interval': '60s', 'scrape_timeout': '55s', 'static_configs':\n [{'targets': ['provision.event.dreamhack.se:9272']}]}\n scrape_configs.append(vcenter)\n relabel = {'regex': ':?([^:]*):?.*', 'separator': ':', 'replacement':\n '${1}', 'source_labels': ['host', 'instance'], 'target_label': 'host'}\n mrc = 'metric_relabel_configs'\n for scrape in scrape_configs:\n if mrc in scrape:\n scrape[mrc].append(relabel)\n else:\n scrape[mrc] = [relabel]\n return {'scrape_configs': scrape_configs}\n\n\ndef requires(host, *args):\n return ['apache(ldap)']\n\n\ndef generate(host, *args):\n info = {}\n local_targets = []\n local_targets.append({'job_name': 'prometheus', 'scheme': 'http',\n 'static_configs': [{'targets': ['localhost:9090']}]})\n info['prometheus'] = generate_backend(host, local_targets)\n info['prometheus']['current_event'] = lib.get_current_event()\n return info\n",
"step-3": "<mask token>\nMANIFEST_PATH = '/etc/manifest'\nHTTP_BASIC_AUTH = None\n\n\ndef blackbox(name, backend, targets, params, target='target', path='/probe',\n labels=None):\n labels = {} if labels is None else labels\n banned_oses = ['debian']\n filtered_targets = [x for x in targets if lib.get_os(x) not in banned_oses]\n return {'job_name': name, 'metrics_path': path, 'params': params,\n 'static_configs': [{'targets': sorted(filtered_targets), 'labels':\n labels}], 'relabel_configs': [{'source_labels': ['__address__'],\n 'regex': '(.*)(:80)?', 'target_label': '__param_%s' % target,\n 'replacement': '${1}'}, {'source_labels': ['__param_%s' % target],\n 'regex': '(.*)', 'target_label': 'instance', 'replacement': '${1}'},\n {'source_labels': [], 'regex': '.*', 'target_label': '__address__',\n 'replacement': backend}]}\n\n\ndef generate_backend(host, local_services):\n scrape_configs = []\n scrape_configs.extend(local_services)\n domain = lib.get_domain(host)\n basic_auth = lib.read_secret('services/monitoring:login')\n manifest = yaml.load(file(MANIFEST_PATH).read())\n for package, spec in manifest['packages'].iteritems():\n if spec is None or 'monitor' not in spec:\n continue\n urls = spec['monitor']['url'] if isinstance(spec['monitor']['url'],\n dict) else {None: spec['monitor']['url']}\n for url_id, url_str in urls.iteritems():\n url = urlparse.urlparse(url_str)\n targets = []\n for target in sorted(lib.get_nodes_with_package(package, domain\n ).keys()):\n targets.append(target if url.port is None else '%s:%d' % (\n target, url.port))\n scrape_config = {'job_name': package + ('-%s' % url_id if\n url_id else ''), 'metrics_path': url.path, 'scheme': url.\n scheme, 'static_configs': [{'targets': sorted(targets)}]}\n if 'interval' in spec['monitor']:\n scrape_config['scrape_interval'] = spec['monitor']['interval']\n if 'labels' in spec['monitor']:\n scrape_config['static_configs'][0]['labels'] = spec['monitor'][\n 'labels']\n if spec['monitor'].get('auth', False) and url.scheme == 'https':\n scrape_config['basic_auth'] = basic_auth\n scrape_configs.append(scrape_config)\n layers = lib.get_layers(domain)\n snmp_nodes = {}\n ssh_nodes = {}\n for layer in layers:\n hosts = lib.get_nodes_with_layer(layer, domain)\n snmp_mute = lib.get_nodes_with_layer(layer, domain, 'no-snmp')\n ssh_mute = lib.get_nodes_with_layer(layer, domain, 'no-ssh')\n snmp_nodes[layer] = list(set(hosts) - set(snmp_mute))\n ssh_nodes[layer] = [(x + ':22') for x in set(hosts) - set(ssh_mute)]\n for layer in layers:\n if layer == 'access':\n snmp_host = 'snmp2.event.dreamhack.se'\n else:\n snmp_host = 'snmp1.event.dreamhack.se'\n snmp = blackbox('snmp_%s' % layer, snmp_host, snmp_nodes[layer], {\n 'layer': [layer]}, labels={'layer': layer})\n snmp['scrape_interval'] = '30s'\n snmp['scrape_timeout'] = '30s'\n scrape_configs.append(snmp)\n for layer in layers:\n for ssh_host in ['jumpgate1', 'jumpgate2', 'rancid']:\n fqdn = ssh_host + '.event.dreamhack.se:9115'\n ssh = blackbox('ssh_%s_%s' % (layer, ssh_host), fqdn, ssh_nodes\n [layer], {'module': ['ssh_banner']}, labels={'layer': layer})\n ssh['scrape_interval'] = '30s'\n ssh['scrape_timeout'] = '30s'\n scrape_configs.append(ssh)\n external = {'job_name': 'external', 'file_sd_configs': [{'files': [\n '/etc/prometheus/external/*.yaml']}]}\n scrape_configs.append(external)\n if host.endswith('.event.dreamhack.se'):\n puppet = {'job_name': 'puppet_runs', 'metrics_path': '/metrics',\n 'scrape_interval': '60s', 'scrape_timeout': '55s',\n 'static_configs': [{'targets': 
['puppet.tech.dreamhack.se:9100']}]}\n scrape_configs.append(puppet)\n vcenter = {'job_name': 'vmware_vcenter', 'metrics_path': '/metrics',\n 'scrape_interval': '60s', 'scrape_timeout': '55s', 'static_configs':\n [{'targets': ['provision.event.dreamhack.se:9272']}]}\n scrape_configs.append(vcenter)\n relabel = {'regex': ':?([^:]*):?.*', 'separator': ':', 'replacement':\n '${1}', 'source_labels': ['host', 'instance'], 'target_label': 'host'}\n mrc = 'metric_relabel_configs'\n for scrape in scrape_configs:\n if mrc in scrape:\n scrape[mrc].append(relabel)\n else:\n scrape[mrc] = [relabel]\n return {'scrape_configs': scrape_configs}\n\n\ndef requires(host, *args):\n return ['apache(ldap)']\n\n\ndef generate(host, *args):\n info = {}\n local_targets = []\n local_targets.append({'job_name': 'prometheus', 'scheme': 'http',\n 'static_configs': [{'targets': ['localhost:9090']}]})\n info['prometheus'] = generate_backend(host, local_targets)\n info['prometheus']['current_event'] = lib.get_current_event()\n return info\n",
"step-4": "import lib\nimport urlparse\nimport yaml\nMANIFEST_PATH = '/etc/manifest'\nHTTP_BASIC_AUTH = None\n\n\ndef blackbox(name, backend, targets, params, target='target', path='/probe',\n labels=None):\n labels = {} if labels is None else labels\n banned_oses = ['debian']\n filtered_targets = [x for x in targets if lib.get_os(x) not in banned_oses]\n return {'job_name': name, 'metrics_path': path, 'params': params,\n 'static_configs': [{'targets': sorted(filtered_targets), 'labels':\n labels}], 'relabel_configs': [{'source_labels': ['__address__'],\n 'regex': '(.*)(:80)?', 'target_label': '__param_%s' % target,\n 'replacement': '${1}'}, {'source_labels': ['__param_%s' % target],\n 'regex': '(.*)', 'target_label': 'instance', 'replacement': '${1}'},\n {'source_labels': [], 'regex': '.*', 'target_label': '__address__',\n 'replacement': backend}]}\n\n\ndef generate_backend(host, local_services):\n scrape_configs = []\n scrape_configs.extend(local_services)\n domain = lib.get_domain(host)\n basic_auth = lib.read_secret('services/monitoring:login')\n manifest = yaml.load(file(MANIFEST_PATH).read())\n for package, spec in manifest['packages'].iteritems():\n if spec is None or 'monitor' not in spec:\n continue\n urls = spec['monitor']['url'] if isinstance(spec['monitor']['url'],\n dict) else {None: spec['monitor']['url']}\n for url_id, url_str in urls.iteritems():\n url = urlparse.urlparse(url_str)\n targets = []\n for target in sorted(lib.get_nodes_with_package(package, domain\n ).keys()):\n targets.append(target if url.port is None else '%s:%d' % (\n target, url.port))\n scrape_config = {'job_name': package + ('-%s' % url_id if\n url_id else ''), 'metrics_path': url.path, 'scheme': url.\n scheme, 'static_configs': [{'targets': sorted(targets)}]}\n if 'interval' in spec['monitor']:\n scrape_config['scrape_interval'] = spec['monitor']['interval']\n if 'labels' in spec['monitor']:\n scrape_config['static_configs'][0]['labels'] = spec['monitor'][\n 'labels']\n if spec['monitor'].get('auth', False) and url.scheme == 'https':\n scrape_config['basic_auth'] = basic_auth\n scrape_configs.append(scrape_config)\n layers = lib.get_layers(domain)\n snmp_nodes = {}\n ssh_nodes = {}\n for layer in layers:\n hosts = lib.get_nodes_with_layer(layer, domain)\n snmp_mute = lib.get_nodes_with_layer(layer, domain, 'no-snmp')\n ssh_mute = lib.get_nodes_with_layer(layer, domain, 'no-ssh')\n snmp_nodes[layer] = list(set(hosts) - set(snmp_mute))\n ssh_nodes[layer] = [(x + ':22') for x in set(hosts) - set(ssh_mute)]\n for layer in layers:\n if layer == 'access':\n snmp_host = 'snmp2.event.dreamhack.se'\n else:\n snmp_host = 'snmp1.event.dreamhack.se'\n snmp = blackbox('snmp_%s' % layer, snmp_host, snmp_nodes[layer], {\n 'layer': [layer]}, labels={'layer': layer})\n snmp['scrape_interval'] = '30s'\n snmp['scrape_timeout'] = '30s'\n scrape_configs.append(snmp)\n for layer in layers:\n for ssh_host in ['jumpgate1', 'jumpgate2', 'rancid']:\n fqdn = ssh_host + '.event.dreamhack.se:9115'\n ssh = blackbox('ssh_%s_%s' % (layer, ssh_host), fqdn, ssh_nodes\n [layer], {'module': ['ssh_banner']}, labels={'layer': layer})\n ssh['scrape_interval'] = '30s'\n ssh['scrape_timeout'] = '30s'\n scrape_configs.append(ssh)\n external = {'job_name': 'external', 'file_sd_configs': [{'files': [\n '/etc/prometheus/external/*.yaml']}]}\n scrape_configs.append(external)\n if host.endswith('.event.dreamhack.se'):\n puppet = {'job_name': 'puppet_runs', 'metrics_path': '/metrics',\n 'scrape_interval': '60s', 'scrape_timeout': '55s',\n 
'static_configs': [{'targets': ['puppet.tech.dreamhack.se:9100']}]}\n scrape_configs.append(puppet)\n vcenter = {'job_name': 'vmware_vcenter', 'metrics_path': '/metrics',\n 'scrape_interval': '60s', 'scrape_timeout': '55s', 'static_configs':\n [{'targets': ['provision.event.dreamhack.se:9272']}]}\n scrape_configs.append(vcenter)\n relabel = {'regex': ':?([^:]*):?.*', 'separator': ':', 'replacement':\n '${1}', 'source_labels': ['host', 'instance'], 'target_label': 'host'}\n mrc = 'metric_relabel_configs'\n for scrape in scrape_configs:\n if mrc in scrape:\n scrape[mrc].append(relabel)\n else:\n scrape[mrc] = [relabel]\n return {'scrape_configs': scrape_configs}\n\n\ndef requires(host, *args):\n return ['apache(ldap)']\n\n\ndef generate(host, *args):\n info = {}\n local_targets = []\n local_targets.append({'job_name': 'prometheus', 'scheme': 'http',\n 'static_configs': [{'targets': ['localhost:9090']}]})\n info['prometheus'] = generate_backend(host, local_targets)\n info['prometheus']['current_event'] = lib.get_current_event()\n return info\n",
"step-5": "# Copyright 2018 dhtech\n#\n# Use of this source code is governed by a BSD-style\n# license that can be found in the LICENSE file\nimport lib\nimport urlparse\nimport yaml\n\n\nMANIFEST_PATH = '/etc/manifest'\nHTTP_BASIC_AUTH = None\n\n\ndef blackbox(name, backend, targets, params,\n target='target', path='/probe', labels=None):\n labels = {} if labels is None else labels\n # Strip banned OSes\n banned_oses = ['debian']\n filtered_targets = [x for x in targets if lib.get_os(x) not in banned_oses]\n return {\n 'job_name': name,\n 'metrics_path': path,\n 'params': params,\n 'static_configs': [{\n 'targets': sorted(filtered_targets),\n 'labels': labels\n }],\n 'relabel_configs': [{\n 'source_labels': ['__address__'],\n 'regex': '(.*)(:80)?',\n 'target_label': '__param_%s' % target,\n 'replacement': '${1}',\n }, {\n 'source_labels': ['__param_%s' % target],\n 'regex': '(.*)',\n 'target_label': 'instance',\n 'replacement': '${1}',\n }, {\n 'source_labels': [],\n 'regex': '.*',\n 'target_label': '__address__',\n 'replacement': backend,\n }]\n }\n\n\ndef generate_backend(host, local_services):\n scrape_configs = []\n scrape_configs.extend(local_services)\n domain = lib.get_domain(host)\n\n basic_auth = lib.read_secret('services/monitoring:login')\n\n # Find services that wants to be monitored\n manifest = yaml.load(file(MANIFEST_PATH).read())\n for package, spec in manifest['packages'].iteritems():\n if spec is None or 'monitor' not in spec:\n continue\n\n urls = (spec['monitor']['url']\n if isinstance(spec['monitor']['url'], dict) else\n {None: spec['monitor']['url']})\n for url_id, url_str in urls.iteritems():\n url = urlparse.urlparse(url_str)\n targets = []\n for target in sorted(\n lib.get_nodes_with_package(package, domain).keys()):\n targets.append(target if url.port is None else '%s:%d' % (\n target, url.port))\n scrape_config = {\n 'job_name': package + ('-%s' % url_id if url_id else ''),\n 'metrics_path': url.path,\n 'scheme': url.scheme,\n 'static_configs': [\n {'targets': sorted(targets)}\n ],\n }\n if 'interval' in spec['monitor']:\n scrape_config['scrape_interval'] = spec['monitor']['interval']\n if 'labels' in spec['monitor']:\n scrape_config['static_configs'][0]['labels'] = spec['monitor']['labels']\n # Only allow authentication over https\n if spec['monitor'].get('auth', False) and url.scheme == 'https':\n scrape_config['basic_auth'] = basic_auth\n scrape_configs.append(scrape_config)\n\n # Layer specific monitoring\n layers = lib.get_layers(domain)\n\n snmp_nodes = {}\n ssh_nodes = {}\n for layer in layers:\n hosts = lib.get_nodes_with_layer(layer, domain)\n snmp_mute = lib.get_nodes_with_layer(layer, domain, 'no-snmp')\n ssh_mute = lib.get_nodes_with_layer(layer, domain, 'no-ssh')\n snmp_nodes[layer] = list(set(hosts) - set(snmp_mute))\n ssh_nodes[layer] = [x+':22' for x in set(hosts) - set(ssh_mute)]\n\n # SNMP\n for layer in layers:\n # TODO(bluecmd): Use options for this\n if layer == 'access':\n snmp_host = 'snmp2.event.dreamhack.se'\n else:\n snmp_host = 'snmp1.event.dreamhack.se'\n snmp = blackbox(\n 'snmp_%s' % layer, snmp_host,\n snmp_nodes[layer], {'layer': [layer]}, labels={\n 'layer': layer})\n snmp['scrape_interval'] = '30s'\n snmp['scrape_timeout'] = '30s'\n scrape_configs.append(snmp)\n\n # SSH\n for layer in layers:\n for ssh_host in ['jumpgate1', 'jumpgate2', 'rancid']:\n fqdn = ssh_host + '.event.dreamhack.se:9115'\n ssh = blackbox(\n 'ssh_%s_%s' % (layer, ssh_host), fqdn,\n ssh_nodes[layer], {'module': ['ssh_banner']}, labels={'layer': layer})\n 
ssh['scrape_interval'] = '30s'\n ssh['scrape_timeout'] = '30s'\n scrape_configs.append(ssh)\n\n # Add external service-discovery\n external = {\n 'job_name': 'external',\n 'file_sd_configs': [{\n 'files': ['/etc/prometheus/external/*.yaml'],\n }],\n }\n scrape_configs.append(external)\n\n if host.endswith('.event.dreamhack.se'):\n # Event should scrape puppet.tech.dreamhack.se to get information about\n # puppet runs\n puppet = {\n 'job_name': 'puppet_runs',\n 'metrics_path': '/metrics',\n 'scrape_interval': '60s',\n 'scrape_timeout': '55s',\n 'static_configs': [{\n 'targets': ['puppet.tech.dreamhack.se:9100'],\n }],\n }\n scrape_configs.append(puppet)\n\n vcenter = {\n 'job_name': 'vmware_vcenter',\n 'metrics_path': '/metrics',\n 'scrape_interval': '60s',\n 'scrape_timeout': '55s',\n 'static_configs': [{\n 'targets': ['provision.event.dreamhack.se:9272'],\n }],\n }\n scrape_configs.append(vcenter)\n\n # Make sure that all metrics have a host label.\n # This rule uses the existing host label if there is one,\n # stripping of the port (which shouldn't be part of the host label anyway)\n # *or* if that label does not exist it uses the instance label\n # (again stripping of the port)\n relabel = {\n 'regex': r':?([^:]*):?.*',\n 'separator': ':',\n 'replacement': '${1}',\n 'source_labels': ['host', 'instance'],\n 'target_label': 'host',\n }\n\n mrc = 'metric_relabel_configs'\n for scrape in scrape_configs:\n if mrc in scrape:\n scrape[mrc].append(relabel)\n else:\n scrape[mrc] = [relabel]\n return {'scrape_configs': scrape_configs}\n\n\ndef requires(host, *args):\n return ['apache(ldap)']\n\n\ndef generate(host, *args):\n\n info = {}\n\n local_targets = []\n local_targets.append({\n 'job_name': 'prometheus',\n 'scheme': 'http',\n 'static_configs': [{'targets': ['localhost:9090']}]})\n info['prometheus'] = generate_backend(host, local_targets)\n\n # Get current event\n info['prometheus']['current_event'] = lib.get_current_event()\n\n return info\n\n# vim: ts=4: sts=4: sw=4: expandtab\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
# -*- coding: utf-8 -*-
"""
VorRun
Runs Vorlax and plots wireframe output from Vorlax
(https://github.com/GalaxyHobo/VORLAX)
NOTE! Type: "%matplotlib auto" in iPython console to
switch to interactive plots, or "%matplotlib inline"
to switch to inline, in the console.
NOTE! Reads path to Vorlax .exe in "path.txt" file that resides in
same directory as vorRun.py. The path in that file must be on the
first line and begin with drive letter + colon, or "\". Assumes
C-drive if path begins with "\".
Lance Bays
"""
import os
from mpl_toolkits.mplot3d import axes3d
import matplotlib.pyplot as plt
import numpy as np
# Establish the working directory containing the exe:
# read the path to the Vorlax directory from "path.txt"
fout = open("path.txt", 'r')
userExePath = fout.readline().strip()  # strip any trailing newline so the shell command stays intact
fout.close()
# Split drive letter from path
drive, exePath = userExePath.split("\\", 1)
# Handle case where user doesn't include drive in path —
# we will assume it's on the C drive.
if not drive:
    drive = "C:"
# Run program
# Command-line instructions to change drive & directory, and run program
runString = drive + " && cd \\" + exePath + " && vorlax.exe"
os.system(runString)
# Read output file
fout = open(drive + "\\" + exePath + "\\VORLAX.WIRE", 'r')
lines = fout.readlines()
fout.close()
# Convert to numpy array
panelData = []
for index, line in enumerate(lines):
    panelData.append(np.array(list(map(float, line.split()))))
panelData = np.array(panelData)
# Determine array of unique panel ID's
panelNums = np.unique(panelData[0:, 0:1])
# Add subplot
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
# Plot the Vorlax wireframe (one side)
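# (each boolean mask selects one column of panelData: column 1 = x, 2 = y,
# 3 = z; column 0 is the panel ID and column 4 the symmetry flag)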
for index in panelNums:
ax.plot_wireframe(
panelData[panelData[:,0]==index][:,np.array([False,True,False,False,False])],
panelData[panelData[:,0]==index][:,np.array([False,False,True,False,False])],
panelData[panelData[:,0]==index][:,np.array([False,False,False,True,False])])
# Plot the mirror image (if symmetry is indicated in wire file)
for index in panelNums:
    symFlag = panelData[panelData[:, 0] == index][0, np.array([False, False, False, False, True])]
if symFlag==0 or symFlag==2:
ax.plot_wireframe(
panelData[panelData[:,0]==index][:,np.array([False,True,False,False,False])],
-1*panelData[panelData[:,0]==index][:,np.array([False,False,True,False,False])],
panelData[panelData[:,0]==index][:,np.array([False,False,False,True,False])])
# Format plot
ax.grid()
ax.set(ylabel='y-in',
xlabel='x-in',
zlabel='z-in',
title='')
ax.xaxis.label.set_size(16)
ax.yaxis.label.set_size(16)
ax.zaxis.label.set_size(16)
# Create super-set of data to establish ranges
x = panelData[:, 1]
y = panelData[:, 2]
negativey = -1 * panelData[:, 2]
y = np.concatenate((y, negativey), axis=0)
z = panelData[:, 3]
# Set equal scales on axes
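# (note: 'equal' aspect may raise NotImplementedError on some Matplotlib 3D
# versions; the manual axis-range code below enforces equal scales anyway)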
ax.set_aspect('equal')
# Set ranges for plot
max_range = np.array([x.max() - x.min(),
y.max() - y.min(),
z.max() - z.min()]).max() / 2.0
# Compute midpoints in each direction
mid_x = (x.max() + x.min()) * 0.5
mid_y = (y.max() + y.min()) * 0.5
mid_z = (z.max() + z.min()) * 0.5
# Set final ranges
ax.set_xlim(mid_x - max_range, mid_x + max_range)
ax.set_ylim(mid_y - max_range, mid_y + max_range)
ax.set_zlim(mid_z - max_range, mid_z + max_range)
plt.show()
|
normal
|
{
"blob_id": "9aee715e976db632f0829a06cb9e0101c90512be",
"index": 2150,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfout.close()\n<mask token>\nif not drive:\n drive = 'C:'\n<mask token>\nos.system(runString)\n<mask token>\nfout.close()\n<mask token>\nfor index, line in enumerate(lines):\n panelData.append(np.array(list(map(float, lines[index].split()))))\n<mask token>\nfor index in panelNums:\n ax.plot_wireframe(panelData[panelData[:, 0] == index][:, np.array([\n False, True, False, False, False])], panelData[panelData[:, 0] ==\n index][:, np.array([False, False, True, False, False])], panelData[\n panelData[:, 0] == index][:, np.array([False, False, False, True, \n False])])\nfor index in panelNums:\n symFlag = panelData[panelData[:, 0] == index][0, np.array([False, False,\n False, False, True])]\n if symFlag == 0 or symFlag == 2:\n ax.plot_wireframe(panelData[panelData[:, 0] == index][:, np.array([\n False, True, False, False, False])], -1 * panelData[panelData[:,\n 0] == index][:, np.array([False, False, True, False, False])],\n panelData[panelData[:, 0] == index][:, np.array([False, False, \n False, True, False])])\nax.grid()\nax.set(ylabel='y-in', xlabel='x-in', zlabel='z-in', title='')\nax.xaxis.label.set_size(16)\nax.yaxis.label.set_size(16)\nax.zaxis.label.set_size(16)\n<mask token>\nax.set_aspect('equal')\n<mask token>\nax.set_xlim(mid_x - max_range, mid_x + max_range)\nax.set_ylim(mid_y - max_range, mid_y + max_range)\nax.set_zlim(mid_z - max_range, mid_z + max_range)\nplt.show()\n",
"step-3": "<mask token>\nfout = open('path.txt', 'r')\nuserExePath = fout.readline()\nfout.close()\ndrive, exePath = userExePath.split('\\\\', 1)\nif not drive:\n drive = 'C:'\nrunString = drive + ' && cd \\\\' + exePath + ' && vorlax.exe'\nos.system(runString)\nfout = open(drive + '\\\\' + exePath + '\\\\VORLAX.WIRE', 'r')\nlines = fout.readlines()\nfout.close()\npanelData = []\nfor index, line in enumerate(lines):\n panelData.append(np.array(list(map(float, lines[index].split()))))\npanelData = np.array(panelData)\npanelNums = np.unique(panelData[0:, 0:1])\nfig = plt.figure()\nax = fig.add_subplot(111, projection='3d')\nfor index in panelNums:\n ax.plot_wireframe(panelData[panelData[:, 0] == index][:, np.array([\n False, True, False, False, False])], panelData[panelData[:, 0] ==\n index][:, np.array([False, False, True, False, False])], panelData[\n panelData[:, 0] == index][:, np.array([False, False, False, True, \n False])])\nfor index in panelNums:\n symFlag = panelData[panelData[:, 0] == index][0, np.array([False, False,\n False, False, True])]\n if symFlag == 0 or symFlag == 2:\n ax.plot_wireframe(panelData[panelData[:, 0] == index][:, np.array([\n False, True, False, False, False])], -1 * panelData[panelData[:,\n 0] == index][:, np.array([False, False, True, False, False])],\n panelData[panelData[:, 0] == index][:, np.array([False, False, \n False, True, False])])\nax.grid()\nax.set(ylabel='y-in', xlabel='x-in', zlabel='z-in', title='')\nax.xaxis.label.set_size(16)\nax.yaxis.label.set_size(16)\nax.zaxis.label.set_size(16)\nx = panelData[:, 1]\ny = panelData[:, 2]\nnegativey = -1 * panelData[:, 2]\ny = np.concatenate((y, negativey), axis=0)\nz = panelData[:, 3]\nax.set_aspect('equal')\nmax_range = np.array([x.max() - x.min(), y.max() - y.min(), z.max() - z.min()]\n ).max() / 2.0\nmid_x = (x.max() + x.min()) * 0.5\nmid_y = (y.max() + y.min()) * 0.5\nmid_z = (z.max() + z.min()) * 0.5\nax.set_xlim(mid_x - max_range, mid_x + max_range)\nax.set_ylim(mid_y - max_range, mid_y + max_range)\nax.set_zlim(mid_z - max_range, mid_z + max_range)\nplt.show()\n",
"step-4": "<mask token>\nimport os\nfrom mpl_toolkits.mplot3d import axes3d\nimport matplotlib.pyplot as plt\nimport numpy as np\nfout = open('path.txt', 'r')\nuserExePath = fout.readline()\nfout.close()\ndrive, exePath = userExePath.split('\\\\', 1)\nif not drive:\n drive = 'C:'\nrunString = drive + ' && cd \\\\' + exePath + ' && vorlax.exe'\nos.system(runString)\nfout = open(drive + '\\\\' + exePath + '\\\\VORLAX.WIRE', 'r')\nlines = fout.readlines()\nfout.close()\npanelData = []\nfor index, line in enumerate(lines):\n panelData.append(np.array(list(map(float, lines[index].split()))))\npanelData = np.array(panelData)\npanelNums = np.unique(panelData[0:, 0:1])\nfig = plt.figure()\nax = fig.add_subplot(111, projection='3d')\nfor index in panelNums:\n ax.plot_wireframe(panelData[panelData[:, 0] == index][:, np.array([\n False, True, False, False, False])], panelData[panelData[:, 0] ==\n index][:, np.array([False, False, True, False, False])], panelData[\n panelData[:, 0] == index][:, np.array([False, False, False, True, \n False])])\nfor index in panelNums:\n symFlag = panelData[panelData[:, 0] == index][0, np.array([False, False,\n False, False, True])]\n if symFlag == 0 or symFlag == 2:\n ax.plot_wireframe(panelData[panelData[:, 0] == index][:, np.array([\n False, True, False, False, False])], -1 * panelData[panelData[:,\n 0] == index][:, np.array([False, False, True, False, False])],\n panelData[panelData[:, 0] == index][:, np.array([False, False, \n False, True, False])])\nax.grid()\nax.set(ylabel='y-in', xlabel='x-in', zlabel='z-in', title='')\nax.xaxis.label.set_size(16)\nax.yaxis.label.set_size(16)\nax.zaxis.label.set_size(16)\nx = panelData[:, 1]\ny = panelData[:, 2]\nnegativey = -1 * panelData[:, 2]\ny = np.concatenate((y, negativey), axis=0)\nz = panelData[:, 3]\nax.set_aspect('equal')\nmax_range = np.array([x.max() - x.min(), y.max() - y.min(), z.max() - z.min()]\n ).max() / 2.0\nmid_x = (x.max() + x.min()) * 0.5\nmid_y = (y.max() + y.min()) * 0.5\nmid_z = (z.max() + z.min()) * 0.5\nax.set_xlim(mid_x - max_range, mid_x + max_range)\nax.set_ylim(mid_y - max_range, mid_y + max_range)\nax.set_zlim(mid_z - max_range, mid_z + max_range)\nplt.show()\n",
"step-5": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nVorRun\r\n\r\nRuns Vorlax and plots wireframe output from Vorlax\r\n(https://github.com/GalaxyHobo/VORLAX)\r\n\r\nNOTE! Type: \"%matplotlib auto\" in iPython console to \r\nswitch to interactive plots, or \"%matplotlib inline\" \r\nto switch to inline, in the console.\r\n\r\nNOTE! Reads path to Vorlax .exe in \"path.txt\" file that resides in\r\nsame directory as vorRun.py. The path in that file must be on the \r\nfirst line and begin with drive letter + colon, or \"\\\". Assumes\r\nC-drive if path begins with \"\\\".\r\n\r\nLance Bays\r\n\"\"\"\r\n\r\nimport os\r\nfrom mpl_toolkits.mplot3d import axes3d\r\nimport matplotlib.pyplot as plt\r\nimport numpy as np\r\n\r\n# Establish working directory with exe...\r\n# Copy & paste absolute path on Local machine here within double quotes\r\n\r\n# Read path to working directory\r\nfout = open(\"path.txt\", 'r')\r\nuserExePath=fout.readline()\r\nfout.close()\r\n\r\n# Split drive Letter from path\r\ndrive, exePath = userExePath.split(\"\\\\\", 1)\r\n\r\n# Handle case where user doesn't include drive in path —\r\n# we will assume it's on the C drive. \r\nif not drive: drive=\"C:\"\r\n\r\n# Run program\r\n# Command-line instructions to change drive & directory, and run program \r\nrunString = drive + \" && cd \\\\\" + exePath + \" && vorlax.exe\" \r\nos.system(\trunString)\r\n\r\n# Read output file\r\nfout = open(drive + \"\\\\\" + exePath + \"\\\\VORLAX.WIRE\", 'r')\r\nlines=fout.readlines()\r\nfout.close()\r\n\r\n# Convert to numpy array \r\npanelData=[]\r\nfor index, line in enumerate(lines):\r\n panelData.append(np.array(list(map(float,lines[index].split()))))\r\npanelData=np.array(panelData)\r\n\r\n# Determine array of unique panel ID's\r\npanelNums = np.unique(panelData[0:,0:1])\r\n\r\n# Add subplot\r\nfig = plt.figure()\r\nax = fig.add_subplot(111, projection='3d')\r\n\r\n# Plot the Vorlax wireframe\t(one side)\r\nfor index in panelNums:\r\n ax.plot_wireframe(\r\n panelData[panelData[:,0]==index][:,np.array([False,True,False,False,False])],\r\n panelData[panelData[:,0]==index][:,np.array([False,False,True,False,False])],\r\n panelData[panelData[:,0]==index][:,np.array([False,False,False,True,False])])\r\n\r\n# Plot the mirror image (if symmetry is indicated in wire file)\r\nfor index in panelNums:\r\n symFlag=panelData[panelData[:,0]==index][0,np.array([False,False,False,False,True])]\r\n if symFlag==0 or symFlag==2:\r\n ax.plot_wireframe(\r\n panelData[panelData[:,0]==index][:,np.array([False,True,False,False,False])],\r\n -1*panelData[panelData[:,0]==index][:,np.array([False,False,True,False,False])],\r\n panelData[panelData[:,0]==index][:,np.array([False,False,False,True,False])])\r\n\r\n# Format plot\r\nax.grid()\r\nax.set(ylabel='y-in',\r\n xlabel='x-in',\r\n zlabel='z-in',\r\n title='')\r\nax.xaxis.label.set_size(16)\r\nax.yaxis.label.set_size(16)\r\nax.zaxis.label.set_size(16)\r\n\r\n# Create super-set of data to establish ranges \r\nx=panelData[:,1]\r\ny=panelData[:,2]\r\nnegativey = -1 * panelData[:,2]\r\n\r\ny=np.concatenate((y, negativey), axis=0)\r\nz=panelData[:,3]\r\n\r\n# Set equal scales on axes\r\nax.set_aspect('equal')\r\n\r\n# Set ranges for plot\r\nmax_range = np.array([x.max() - x.min(),\r\n y.max() - y.min(),\r\n z.max() - z.min()]).max() / 2.0\r\n\r\n# Compute midpoints in each direction \r\nmid_x = (x.max() + x.min()) * 0.5 \r\nmid_y = (y.max() + y.min()) * 0.5\r\nmid_z = (z.max() + z.min()) * 0.5\r\n\r\n# Set final ranges\r\nax.set_xlim(mid_x - max_range, mid_x + 
max_range)\r\nax.set_ylim(mid_y - max_range, mid_y + max_range)\r\nax.set_zlim(mid_z - max_range, mid_z + max_range)\r\nplt.show()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
'''
Created on Sep 23, 2016
@author: Andrew
'''
from pymongo import MongoClient
import re
client = MongoClient()
atMentions = re.compile(ur"@\w+", flags=re.I|re.U)
atMidnight = re.compile(u"@midnight", flags=re.I|re.U)
hashtag = re.compile(ur"#\w+", flags=re.I|re.U)
features = [("usf fwa forward most", "usf fwa backward most", "usf fwa difference most", "usf fwa difference most sign"), ("usf fwa forward least", "usf fwa backward least", "usf fwa difference least", "usf fwa difference least sign"), ("usf fwa forward average", "usf fwa backward average", "usf fwa difference average", "usf fwa difference average sign")]
cols = ["GentlerSongs", "OlympicSongs", "OceanMovies", "BoringBlockbusters"]
p_values = []
for featureF, featureB, featureD, featureS in features:
print "Testing {} vs {}".format(featureF, featureB)
lessMoreDiff = [] #holds difference in feature value for less funny - more funny
for col in cols:
tweets = []
for tweet in client.tweets[col].find({"$and" : [{"total likes" : {"$gte" : 7}}, {featureF : {"$exists" : True}}, {featureB : {"$exists" : True}}]}):
if "punch words" not in tweet:
continue
if (tweet["punch words"] == None) or (tweet["punch words"] == []):
continue
for word in tweet["punch words"]:
if word == "None":
continue
if not word:
continue
mentions = atMentions.findall(tweet["text"])
            if len(mentions) > 1: #if more than 1 person is mentioned
continue
elif len(mentions) == 1:
                if not atMidnight.match(mentions[0]): #if they mention someone other than @midnight
continue
if len(hashtag.findall(tweet["text"])) > 1: #if there's more than 1 hashtag
continue
if (tweet[featureF] > 0) and (tweet[featureB] > 0):
tweet[featureD] = tweet[featureF] - tweet[featureB]
sign = 0 #assume forward and back are equal
if (tweet[featureF] - tweet[featureB]) > 0:
sign = 1
elif ((tweet[featureF] - tweet[featureB])) < 0:
sign = -1
tweet[featureS] = sign
client.tweets[col].update({"_id" : tweet["_id"]}, tweet)
|
normal
|
{
"blob_id": "eb2bb06afb9aeb46ad02cbac145ccd817131074d",
"index": 1753,
"step-1": "'''\r\nCreated on Sep 23, 2016\r\n\r\n@author: Andrew\r\n'''\r\nfrom pymongo import MongoClient\r\nimport re\r\n\r\nclient = MongoClient()\r\n\r\natMentions = re.compile(ur\"@\\w+\", flags=re.I|re.U)\r\natMidnight = re.compile(u\"@midnight\", flags=re.I|re.U)\r\nhashtag = re.compile(ur\"#\\w+\", flags=re.I|re.U)\r\nfeatures = [(\"usf fwa forward most\", \"usf fwa backward most\", \"usf fwa difference most\", \"usf fwa difference most sign\"), (\"usf fwa forward least\", \"usf fwa backward least\", \"usf fwa difference least\", \"usf fwa difference least sign\"), (\"usf fwa forward average\", \"usf fwa backward average\", \"usf fwa difference average\", \"usf fwa difference average sign\")]\r\ncols = [\"GentlerSongs\", \"OlympicSongs\", \"OceanMovies\", \"BoringBlockbusters\"]\r\np_values = []\r\nfor featureF, featureB, featureD, featureS in features:\r\n print \"Testing {} vs {}\".format(featureF, featureB)\r\n lessMoreDiff = [] #holds difference in feature value for less funny - more funny\r\n for col in cols:\r\n tweets = []\r\n for tweet in client.tweets[col].find({\"$and\" : [{\"total likes\" : {\"$gte\" : 7}}, {featureF : {\"$exists\" : True}}, {featureB : {\"$exists\" : True}}]}):\r\n if \"punch words\" not in tweet:\r\n continue\r\n if (tweet[\"punch words\"] == None) or (tweet[\"punch words\"] == []):\r\n continue\r\n for word in tweet[\"punch words\"]:\r\n if word == \"None\":\r\n continue\r\n if not word:\r\n continue\r\n mentions = atMentions.findall(tweet[\"text\"])\r\n if len(mentions) > 1: #if more than 1 person is mentione\r\n continue\r\n elif len(mentions) == 1:\r\n if not atMidnight.match(mentions[0]): #if the mention someone other than @midngiht\r\n continue\r\n if len(hashtag.findall(tweet[\"text\"])) > 1: #if there's more than 1 hashtag\r\n continue\r\n \r\n if (tweet[featureF] > 0) and (tweet[featureB] > 0):\r\n tweet[featureD] = tweet[featureF] - tweet[featureB]\r\n sign = 0 #assume forward and back are equal\r\n if (tweet[featureF] - tweet[featureB]) > 0:\r\n sign = 1\r\n elif ((tweet[featureF] - tweet[featureB])) < 0:\r\n sign = -1\r\n tweet[featureS] = sign\r\n client.tweets[col].update({\"_id\" : tweet[\"_id\"]}, tweet)\r\n \r\n ",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# !usr/bin/env python
# -*- coding: utf-8 -*-
#
# Licensed under a 3-clause BSD license.
#
# @Author: Brian Cherinka
# @Date: 2018-08-16 11:43:42
# @Last modified by: Brian Cherinka
# @Last Modified time: 2018-08-16 11:58:06
from __future__ import print_function, division, absolute_import
import pytest
import os
from flask import template_rendered
from flipper.app import create_app
from contextlib import contextmanager
@contextmanager
def captured_templates(app):
''' Records which templates are used '''
recorded = []
def record(app, template, context, **extra):
recorded.append((template, context))
template_rendered.connect(record)
yield recorded
template_rendered.disconnect(record)
@pytest.fixture()
def get_templates(app):
''' Fixture that returns which jinja template used '''
with captured_templates(app) as templates:
yield templates
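# Example use in a test (illustrative; the url and template name below are
# assumptions):
#   def test_index(testclient, get_templates):
#       testclient.get('/')
#       template, context = get_templates[0]
#       assert template.name == 'index.html'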
@pytest.fixture
def app():
''' Flask application '''
app = create_app()
return app
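# (note: pytest.yield_fixture is deprecated in modern pytest; a plain
# pytest.fixture handles yield-style fixtures)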
@pytest.yield_fixture
def testctx(monkeypatch):
''' Fixture to create an app with a test Flask base url
Returns only the request context to allow for use for url_for
'''
monkeypatch.setenv('FLIPPER_BASE', 'test/flipper')
app = create_app()
app.testing = True
ctx = app.test_request_context()
ctx.push()
yield
ctx.pop()
@pytest.yield_fixture
def testclient(monkeypatch):
''' Fixture to create an app with a test Flask base url
Returns the client fixture
'''
monkeypatch.setenv('FLIPPER_BASE', 'test/flipper')
app = create_app()
app.testing = True
with app.test_client() as client:
yield client
# global releases to loop over
releases = ['dr15', 'dr16']
@pytest.fixture(params=releases)
def monkeyrelease(monkeypatch, request):
''' Fixture to monkeypatch the flipper release environment variable '''
monkeypatch.setitem(os.environ, 'FLIPPER_RELEASE', request.param)
yield request.param
|
normal
|
{
"blob_id": "bd00644b9cf019fe8c86d52494389b7f0f03d3c3",
"index": 1276,
"step-1": "<mask token>\n\n\n@contextmanager\ndef captured_templates(app):\n \"\"\" Records which templates are used \"\"\"\n recorded = []\n\n def record(app, template, context, **extra):\n recorded.append((template, context))\n template_rendered.connect(record)\n yield recorded\n template_rendered.disconnect(record)\n\n\n<mask token>\n\n\n@pytest.fixture\ndef app():\n \"\"\" Flask application \"\"\"\n app = create_app()\n return app\n\n\n@pytest.yield_fixture\ndef testctx(monkeypatch):\n \"\"\" Fixture to create an app with a test Flask base url\n\n Returns only the request context to allow for use for url_for\n\n \"\"\"\n monkeypatch.setenv('FLIPPER_BASE', 'test/flipper')\n app = create_app()\n app.testing = True\n ctx = app.test_request_context()\n ctx.push()\n yield\n ctx.pop()\n\n\n@pytest.yield_fixture\ndef testclient(monkeypatch):\n \"\"\" Fixture to create an app with a test Flask base url\n\n Returns the client fixture\n\n \"\"\"\n monkeypatch.setenv('FLIPPER_BASE', 'test/flipper')\n app = create_app()\n app.testing = True\n with app.test_client() as client:\n yield client\n\n\n<mask token>\n\n\n@pytest.fixture(params=releases)\ndef monkeyrelease(monkeypatch, request):\n \"\"\" Fixture to monkeypatch the flipper release environment variable \"\"\"\n monkeypatch.setitem(os.environ, 'FLIPPER_RELEASE', request.param)\n yield request.param\n",
"step-2": "<mask token>\n\n\n@contextmanager\ndef captured_templates(app):\n \"\"\" Records which templates are used \"\"\"\n recorded = []\n\n def record(app, template, context, **extra):\n recorded.append((template, context))\n template_rendered.connect(record)\n yield recorded\n template_rendered.disconnect(record)\n\n\n@pytest.fixture()\ndef get_templates(app):\n \"\"\" Fixture that returns which jinja template used \"\"\"\n with captured_templates(app) as templates:\n yield templates\n\n\n@pytest.fixture\ndef app():\n \"\"\" Flask application \"\"\"\n app = create_app()\n return app\n\n\n@pytest.yield_fixture\ndef testctx(monkeypatch):\n \"\"\" Fixture to create an app with a test Flask base url\n\n Returns only the request context to allow for use for url_for\n\n \"\"\"\n monkeypatch.setenv('FLIPPER_BASE', 'test/flipper')\n app = create_app()\n app.testing = True\n ctx = app.test_request_context()\n ctx.push()\n yield\n ctx.pop()\n\n\n@pytest.yield_fixture\ndef testclient(monkeypatch):\n \"\"\" Fixture to create an app with a test Flask base url\n\n Returns the client fixture\n\n \"\"\"\n monkeypatch.setenv('FLIPPER_BASE', 'test/flipper')\n app = create_app()\n app.testing = True\n with app.test_client() as client:\n yield client\n\n\n<mask token>\n\n\n@pytest.fixture(params=releases)\ndef monkeyrelease(monkeypatch, request):\n \"\"\" Fixture to monkeypatch the flipper release environment variable \"\"\"\n monkeypatch.setitem(os.environ, 'FLIPPER_RELEASE', request.param)\n yield request.param\n",
"step-3": "<mask token>\n\n\n@contextmanager\ndef captured_templates(app):\n \"\"\" Records which templates are used \"\"\"\n recorded = []\n\n def record(app, template, context, **extra):\n recorded.append((template, context))\n template_rendered.connect(record)\n yield recorded\n template_rendered.disconnect(record)\n\n\n@pytest.fixture()\ndef get_templates(app):\n \"\"\" Fixture that returns which jinja template used \"\"\"\n with captured_templates(app) as templates:\n yield templates\n\n\n@pytest.fixture\ndef app():\n \"\"\" Flask application \"\"\"\n app = create_app()\n return app\n\n\n@pytest.yield_fixture\ndef testctx(monkeypatch):\n \"\"\" Fixture to create an app with a test Flask base url\n\n Returns only the request context to allow for use for url_for\n\n \"\"\"\n monkeypatch.setenv('FLIPPER_BASE', 'test/flipper')\n app = create_app()\n app.testing = True\n ctx = app.test_request_context()\n ctx.push()\n yield\n ctx.pop()\n\n\n@pytest.yield_fixture\ndef testclient(monkeypatch):\n \"\"\" Fixture to create an app with a test Flask base url\n\n Returns the client fixture\n\n \"\"\"\n monkeypatch.setenv('FLIPPER_BASE', 'test/flipper')\n app = create_app()\n app.testing = True\n with app.test_client() as client:\n yield client\n\n\nreleases = ['dr15', 'dr16']\n\n\n@pytest.fixture(params=releases)\ndef monkeyrelease(monkeypatch, request):\n \"\"\" Fixture to monkeypatch the flipper release environment variable \"\"\"\n monkeypatch.setitem(os.environ, 'FLIPPER_RELEASE', request.param)\n yield request.param\n",
"step-4": "from __future__ import print_function, division, absolute_import\nimport pytest\nimport os\nfrom flask import template_rendered\nfrom flipper.app import create_app\nfrom contextlib import contextmanager\n\n\n@contextmanager\ndef captured_templates(app):\n \"\"\" Records which templates are used \"\"\"\n recorded = []\n\n def record(app, template, context, **extra):\n recorded.append((template, context))\n template_rendered.connect(record)\n yield recorded\n template_rendered.disconnect(record)\n\n\n@pytest.fixture()\ndef get_templates(app):\n \"\"\" Fixture that returns which jinja template used \"\"\"\n with captured_templates(app) as templates:\n yield templates\n\n\n@pytest.fixture\ndef app():\n \"\"\" Flask application \"\"\"\n app = create_app()\n return app\n\n\n@pytest.yield_fixture\ndef testctx(monkeypatch):\n \"\"\" Fixture to create an app with a test Flask base url\n\n Returns only the request context to allow for use for url_for\n\n \"\"\"\n monkeypatch.setenv('FLIPPER_BASE', 'test/flipper')\n app = create_app()\n app.testing = True\n ctx = app.test_request_context()\n ctx.push()\n yield\n ctx.pop()\n\n\n@pytest.yield_fixture\ndef testclient(monkeypatch):\n \"\"\" Fixture to create an app with a test Flask base url\n\n Returns the client fixture\n\n \"\"\"\n monkeypatch.setenv('FLIPPER_BASE', 'test/flipper')\n app = create_app()\n app.testing = True\n with app.test_client() as client:\n yield client\n\n\nreleases = ['dr15', 'dr16']\n\n\n@pytest.fixture(params=releases)\ndef monkeyrelease(monkeypatch, request):\n \"\"\" Fixture to monkeypatch the flipper release environment variable \"\"\"\n monkeypatch.setitem(os.environ, 'FLIPPER_RELEASE', request.param)\n yield request.param\n",
"step-5": "# !usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Licensed under a 3-clause BSD license.\n#\n# @Author: Brian Cherinka\n# @Date: 2018-08-16 11:43:42\n# @Last modified by: Brian Cherinka\n# @Last Modified time: 2018-08-16 11:58:06\n\nfrom __future__ import print_function, division, absolute_import\nimport pytest\nimport os\nfrom flask import template_rendered\nfrom flipper.app import create_app\nfrom contextlib import contextmanager\n\n\n@contextmanager\ndef captured_templates(app):\n ''' Records which templates are used '''\n recorded = []\n\n def record(app, template, context, **extra):\n recorded.append((template, context))\n\n template_rendered.connect(record)\n yield recorded\n template_rendered.disconnect(record)\n\n\n@pytest.fixture()\ndef get_templates(app):\n ''' Fixture that returns which jinja template used '''\n with captured_templates(app) as templates:\n yield templates\n\n\n@pytest.fixture\ndef app():\n ''' Flask application '''\n app = create_app()\n return app\n\n\n@pytest.yield_fixture\ndef testctx(monkeypatch):\n ''' Fixture to create an app with a test Flask base url\n\n Returns only the request context to allow for use for url_for\n\n '''\n monkeypatch.setenv('FLIPPER_BASE', 'test/flipper')\n app = create_app()\n app.testing = True\n ctx = app.test_request_context()\n ctx.push()\n yield\n ctx.pop()\n\n\n@pytest.yield_fixture\ndef testclient(monkeypatch):\n ''' Fixture to create an app with a test Flask base url\n\n Returns the client fixture\n\n '''\n monkeypatch.setenv('FLIPPER_BASE', 'test/flipper')\n app = create_app()\n app.testing = True\n with app.test_client() as client:\n yield client\n\n\n# global releases to loop over\nreleases = ['dr15', 'dr16']\n\n\n@pytest.fixture(params=releases)\ndef monkeyrelease(monkeypatch, request):\n ''' Fixture to monkeypatch the flipper release environment variable '''\n monkeypatch.setitem(os.environ, 'FLIPPER_RELEASE', request.param)\n yield request.param\n\n",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
class Solution:
def isPalindrome(self, x: int) ->bool:
num_str = str(x)
i, j = 0, len(num_str) - 1
while i < j:
if num_str[i] == num_str[j]:
i += 1
j -= 1
continue
return False
return True
def isPalindrome1(self, x: int) ->bool:
if x < 0:
return False
res = []
while x >= 1:
tmp = x // 10
res.append(x - tmp * 10)
x = tmp
i, j = 0, len(res) - 1
while i < j:
if res[i] == res[j]:
i += 1
j -= 1
continue
return False
return True
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Solution:
def isPalindrome(self, x: int) ->bool:
num_str = str(x)
i, j = 0, len(num_str) - 1
while i < j:
if num_str[i] == num_str[j]:
i += 1
j -= 1
continue
return False
return True
def isPalindrome1(self, x: int) ->bool:
if x < 0:
return False
res = []
while x >= 1:
tmp = x // 10
res.append(x - tmp * 10)
x = tmp
i, j = 0, len(res) - 1
while i < j:
if res[i] == res[j]:
i += 1
j -= 1
continue
return False
return True
def isPalindrome2(self, x: int) ->bool:
if x < 0:
return False
div = 1
while x // div >= 10:
div *= 10
while x > 0:
left = x // div
right = x % 10
if left != right:
return False
x = x % div // 10
div //= 100
return True
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Solution:
def isPalindrome(self, x: int) ->bool:
num_str = str(x)
i, j = 0, len(num_str) - 1
while i < j:
if num_str[i] == num_str[j]:
i += 1
j -= 1
continue
return False
return True
def isPalindrome1(self, x: int) ->bool:
if x < 0:
return False
res = []
while x >= 1:
tmp = x // 10
res.append(x - tmp * 10)
x = tmp
i, j = 0, len(res) - 1
while i < j:
if res[i] == res[j]:
i += 1
j -= 1
continue
return False
return True
def isPalindrome2(self, x: int) ->bool:
if x < 0:
return False
div = 1
while x // div >= 10:
div *= 10
while x > 0:
left = x // div
right = x % 10
if left != right:
return False
x = x % div // 10
div //= 100
return True
def isPalindrome3(self, x: int) ->bool:
if x < 0 or x % 10 == 0 and x != 0:
return False
revert_num = 0
while revert_num < x:
num = x % 10
revert_num = revert_num * 10 + num
x //= 10
return revert_num == x or revert_num // 10 == x
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Solution:
def isPalindrome(self, x: int) ->bool:
num_str = str(x)
i, j = 0, len(num_str) - 1
while i < j:
if num_str[i] == num_str[j]:
i += 1
j -= 1
continue
return False
return True
def isPalindrome1(self, x: int) ->bool:
if x < 0:
return False
res = []
while x >= 1:
tmp = x // 10
res.append(x - tmp * 10)
x = tmp
i, j = 0, len(res) - 1
while i < j:
if res[i] == res[j]:
i += 1
j -= 1
continue
return False
return True
def isPalindrome2(self, x: int) ->bool:
if x < 0:
return False
div = 1
while x // div >= 10:
div *= 10
while x > 0:
left = x // div
right = x % 10
if left != right:
return False
x = x % div // 10
div //= 100
return True
def isPalindrome3(self, x: int) ->bool:
if x < 0 or x % 10 == 0 and x != 0:
return False
revert_num = 0
while revert_num < x:
num = x % 10
revert_num = revert_num * 10 + num
x //= 10
return revert_num == x or revert_num // 10 == x
if __name__ == '__main__':
s = Solution()
print(s.isPalindrome3(121))
<|reserved_special_token_1|>
# @Time : 2019/6/2 8:42
# @Author : Xu Huipeng
# @Blog : https://brycexxx.github.io/
class Solution:
def isPalindrome(self, x: int) -> bool:
num_str = str(x)
i, j = 0, len(num_str) - 1
while i < j:
if num_str[i] == num_str[j]:
i += 1
j -= 1
continue
return False
return True
def isPalindrome1(self, x: int) -> bool:
if x < 0: return False
res = []
while x >= 1:
tmp = x // 10
res.append(x - tmp * 10)
x = tmp
i, j = 0, len(res) - 1
while i < j:
if res[i] == res[j]:
i += 1
j -= 1
continue
return False
return True
def isPalindrome2(self, x: int) -> bool:
if x < 0: return False
div = 1
while x // div >= 10: div *= 10
while x > 0:
left = x // div
right = x % 10
if left != right: return False
x = (x % div) // 10
div //= 100
return True
def isPalindrome3(self, x: int) -> bool:
if x < 0 or (x % 10 == 0 and x != 0): return False
revert_num = 0
while revert_num < x:
num = x % 10
revert_num = revert_num * 10 + num
x //= 10
return revert_num == x or revert_num // 10 == x
if __name__ == '__main__':
s = Solution()
print(s.isPalindrome3(121))
|
flexible
|
{
"blob_id": "40f57ccb1e36d307b11e367a2fb2f6c97051c65b",
"index": 6759,
"step-1": "class Solution:\n\n def isPalindrome(self, x: int) ->bool:\n num_str = str(x)\n i, j = 0, len(num_str) - 1\n while i < j:\n if num_str[i] == num_str[j]:\n i += 1\n j -= 1\n continue\n return False\n return True\n\n def isPalindrome1(self, x: int) ->bool:\n if x < 0:\n return False\n res = []\n while x >= 1:\n tmp = x // 10\n res.append(x - tmp * 10)\n x = tmp\n i, j = 0, len(res) - 1\n while i < j:\n if res[i] == res[j]:\n i += 1\n j -= 1\n continue\n return False\n return True\n <mask token>\n <mask token>\n\n\n<mask token>\n",
"step-2": "class Solution:\n\n def isPalindrome(self, x: int) ->bool:\n num_str = str(x)\n i, j = 0, len(num_str) - 1\n while i < j:\n if num_str[i] == num_str[j]:\n i += 1\n j -= 1\n continue\n return False\n return True\n\n def isPalindrome1(self, x: int) ->bool:\n if x < 0:\n return False\n res = []\n while x >= 1:\n tmp = x // 10\n res.append(x - tmp * 10)\n x = tmp\n i, j = 0, len(res) - 1\n while i < j:\n if res[i] == res[j]:\n i += 1\n j -= 1\n continue\n return False\n return True\n\n def isPalindrome2(self, x: int) ->bool:\n if x < 0:\n return False\n div = 1\n while x // div >= 10:\n div *= 10\n while x > 0:\n left = x // div\n right = x % 10\n if left != right:\n return False\n x = x % div // 10\n div //= 100\n return True\n <mask token>\n\n\n<mask token>\n",
"step-3": "class Solution:\n\n def isPalindrome(self, x: int) ->bool:\n num_str = str(x)\n i, j = 0, len(num_str) - 1\n while i < j:\n if num_str[i] == num_str[j]:\n i += 1\n j -= 1\n continue\n return False\n return True\n\n def isPalindrome1(self, x: int) ->bool:\n if x < 0:\n return False\n res = []\n while x >= 1:\n tmp = x // 10\n res.append(x - tmp * 10)\n x = tmp\n i, j = 0, len(res) - 1\n while i < j:\n if res[i] == res[j]:\n i += 1\n j -= 1\n continue\n return False\n return True\n\n def isPalindrome2(self, x: int) ->bool:\n if x < 0:\n return False\n div = 1\n while x // div >= 10:\n div *= 10\n while x > 0:\n left = x // div\n right = x % 10\n if left != right:\n return False\n x = x % div // 10\n div //= 100\n return True\n\n def isPalindrome3(self, x: int) ->bool:\n if x < 0 or x % 10 == 0 and x != 0:\n return False\n revert_num = 0\n while revert_num < x:\n num = x % 10\n revert_num = revert_num * 10 + num\n x //= 10\n return revert_num == x or revert_num // 10 == x\n\n\n<mask token>\n",
"step-4": "class Solution:\n\n def isPalindrome(self, x: int) ->bool:\n num_str = str(x)\n i, j = 0, len(num_str) - 1\n while i < j:\n if num_str[i] == num_str[j]:\n i += 1\n j -= 1\n continue\n return False\n return True\n\n def isPalindrome1(self, x: int) ->bool:\n if x < 0:\n return False\n res = []\n while x >= 1:\n tmp = x // 10\n res.append(x - tmp * 10)\n x = tmp\n i, j = 0, len(res) - 1\n while i < j:\n if res[i] == res[j]:\n i += 1\n j -= 1\n continue\n return False\n return True\n\n def isPalindrome2(self, x: int) ->bool:\n if x < 0:\n return False\n div = 1\n while x // div >= 10:\n div *= 10\n while x > 0:\n left = x // div\n right = x % 10\n if left != right:\n return False\n x = x % div // 10\n div //= 100\n return True\n\n def isPalindrome3(self, x: int) ->bool:\n if x < 0 or x % 10 == 0 and x != 0:\n return False\n revert_num = 0\n while revert_num < x:\n num = x % 10\n revert_num = revert_num * 10 + num\n x //= 10\n return revert_num == x or revert_num // 10 == x\n\n\nif __name__ == '__main__':\n s = Solution()\n print(s.isPalindrome3(121))\n",
"step-5": "# @Time : 2019/6/2 8:42\n# @Author : Xu Huipeng\n# @Blog : https://brycexxx.github.io/\n\nclass Solution:\n def isPalindrome(self, x: int) -> bool:\n num_str = str(x)\n i, j = 0, len(num_str) - 1\n while i < j:\n if num_str[i] == num_str[j]:\n i += 1\n j -= 1\n continue\n return False\n return True\n\n def isPalindrome1(self, x: int) -> bool:\n if x < 0: return False\n res = []\n while x >= 1:\n tmp = x // 10\n res.append(x - tmp * 10)\n x = tmp\n i, j = 0, len(res) - 1\n while i < j:\n if res[i] == res[j]:\n i += 1\n j -= 1\n continue\n return False\n return True\n\n def isPalindrome2(self, x: int) -> bool:\n if x < 0: return False\n div = 1\n while x // div >= 10: div *= 10\n while x > 0:\n left = x // div\n right = x % 10\n if left != right: return False\n x = (x % div) // 10\n div //= 100\n return True\n\n def isPalindrome3(self, x: int) -> bool:\n if x < 0 or (x % 10 == 0 and x != 0): return False\n revert_num = 0\n while revert_num < x:\n num = x % 10\n revert_num = revert_num * 10 + num\n x //= 10\n return revert_num == x or revert_num // 10 == x\n\n\nif __name__ == '__main__':\n s = Solution()\n print(s.isPalindrome3(121))\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Frame filtering
'''
import numpy as np
import cv2
def filter_frames(frames, method=cv2.HISTCMP_CORREL, target_size=(64, 64), threshold=0.65):
"""Filter noisy frames out
Args:
frames (list<numpy.ndarray[H, W, 3]>): video frames
method (int, optional): histogram comparison method
target_size (tuple<int, int>, optional): frame size used for histogram comparison
threshold (float, optional): minimum correlation between histograms to keep frame
Returns:
list<numpy.ndarray[H, W, 3]>: video frames
"""
resized_frames = [cv2.resize(f.copy(), target_size) for f in frames]
histograms = []
for f in resized_frames:
hist = cv2.calcHist([f], [0, 1, 2], None, [8, 8, 8], [0, 256, 0, 256, 0, 256])
histograms.append(cv2.normalize(hist, hist).flatten())
# Find a reference histogram (median less sensitive to noise)
med_hist = np.median(histograms, axis=0)
filtered_frames = []
# Compare all histograms to the median one
for idx, hist in enumerate(histograms):
# Only keep frames with relatively high correlation
if cv2.compareHist(med_hist, hist, method) > threshold:
filtered_frames.append(frames[idx])
return filtered_frames
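# Illustrative usage (assumes `frames` is a list of BGR frames, e.g. grabbed
# via cv2.VideoCapture):
#   clean_frames = filter_frames(frames, threshold=0.65)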
|
normal
|
{
"blob_id": "1da93e9113089f1a2881d4094180ba524d0d4a86",
"index": 8531,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef filter_frames(frames, method=cv2.HISTCMP_CORREL, target_size=(64, 64),\n threshold=0.65):\n \"\"\"Filter noisy frames out\n\n Args:\n frames (list<numpy.ndarray[H, W, 3]>): video frames\n method (int, optional): histogram comparison method\n target_size (tuple<int, int>, optional): frame size used for histogram comparison\n threshold (float, optional): minimum correlation between histograms to keep frame\n\n Returns:\n list<numpy.ndarray[H, W, 3]>: video frames\n \"\"\"\n resized_frames = [cv2.resize(f.copy(), target_size) for f in frames]\n histograms = []\n for f in resized_frames:\n hist = cv2.calcHist([f], [0, 1, 2], None, [8, 8, 8], [0, 256, 0, \n 256, 0, 256])\n histograms.append(cv2.normalize(hist, hist).flatten())\n med_hist = np.median(histograms, axis=0)\n filtered_frames = []\n for idx, hist in enumerate(histograms):\n if cv2.compareHist(med_hist, hist, method) > threshold:\n filtered_frames.append(frames[idx])\n return filtered_frames\n",
"step-3": "<mask token>\nimport numpy as np\nimport cv2\n\n\ndef filter_frames(frames, method=cv2.HISTCMP_CORREL, target_size=(64, 64),\n threshold=0.65):\n \"\"\"Filter noisy frames out\n\n Args:\n frames (list<numpy.ndarray[H, W, 3]>): video frames\n method (int, optional): histogram comparison method\n target_size (tuple<int, int>, optional): frame size used for histogram comparison\n threshold (float, optional): minimum correlation between histograms to keep frame\n\n Returns:\n list<numpy.ndarray[H, W, 3]>: video frames\n \"\"\"\n resized_frames = [cv2.resize(f.copy(), target_size) for f in frames]\n histograms = []\n for f in resized_frames:\n hist = cv2.calcHist([f], [0, 1, 2], None, [8, 8, 8], [0, 256, 0, \n 256, 0, 256])\n histograms.append(cv2.normalize(hist, hist).flatten())\n med_hist = np.median(histograms, axis=0)\n filtered_frames = []\n for idx, hist in enumerate(histograms):\n if cv2.compareHist(med_hist, hist, method) > threshold:\n filtered_frames.append(frames[idx])\n return filtered_frames\n",
"step-4": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n'''\nFrame filtering\n'''\n\nimport numpy as np\nimport cv2\n\n\ndef filter_frames(frames, method=cv2.HISTCMP_CORREL, target_size=(64, 64), threshold=0.65):\n \"\"\"Filter noisy frames out\n\n Args:\n frames (list<numpy.ndarray[H, W, 3]>): video frames\n method (int, optional): histogram comparison method\n target_size (tuple<int, int>, optional): frame size used for histogram comparison\n threshold (float, optional): minimum correlation between histograms to keep frame\n\n Returns:\n list<numpy.ndarray[H, W, 3]>: video frames\n \"\"\"\n\n resized_frames = [cv2.resize(f.copy(), target_size) for f in frames]\n\n histograms = []\n for f in resized_frames:\n hist = cv2.calcHist([f], [0, 1, 2], None, [8, 8, 8], [0, 256, 0, 256, 0, 256])\n histograms.append(cv2.normalize(hist, hist).flatten())\n\n # Find a reference histogram (median less sensitive to noise)\n med_hist = np.median(histograms, axis=0)\n\n filtered_frames = []\n # Compare all histograms to the median one\n for idx, hist in enumerate(histograms):\n # Only keep frames with relatively high correlation\n if cv2.compareHist(med_hist, hist, method) > threshold:\n filtered_frames.append(frames[idx])\n\n return filtered_frames\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import numpy as np
class LinearRegressor():
def __init__(self, alpha=0.1, epochs=1):
self.alpha = alpha
self.epochs = epochs
self.costs = []
self.theta = None
def _cost_function(self, y_pred, y, m):
"""
Gets the cost for the predicted values when contrasted with the correct ones.
y_pred: An (1 x m) vector that corresponds to the values predicted by the Linear Regressor
y: An (1 x m) vector that corresponds to the y (right) values in the dataset
m: the number of samples (it could be also inferred from the shape of y or y_pred)
        TODO: You must implement the cost function and return a scalar that corresponds to the error produced by the Linear Regressor with its current configuration
"""
sumatory = 0
for x in range(m):
            sumatory += (y_pred[0][x] - y[0][x]) ** 2
        cost = 1 / (2 * m) * sumatory
return cost
def _hypothesis(self, X):
"""
Calculates the hypothesis for the given examples using the current self.theta values.
X: an m x n array of m samples/examples with n features each.
        Note: X is actually n x m here;
        the transpose of theta is 1 x n, and (1 x n) @ (n x m) = 1 x m
TODO: you must return a (1 x m) array, which corresponds to the estimated value for each of the m samples
"""
# * is element wise multiplication
# numpy.dot(), or @ operator will work
        result = np.transpose(self.theta) @ X
#emptyResult = np.zeros((1,X.shape[1]))
return result
def _cost_function_derivative(self, y_pred, y, X, m):
"""
Calculates the derivatives (gradient) of the cost function through the obtained/predicted values.
y_pred: an (1 x m) array with the predicted values for X dataset
y: an (1 x m) array with the right values for X dataset
X: the input dataset
m: the number of samples in the dataset
TODO: You must implement the calculation of derivatives. An (n x 1) array that corresponds to the gradient of current theta values (the derivative per theta parameter) must be returned.
"""
        derivatives = np.zeros((X.shape[0], 1))
        for j in range(X.shape[0]):
            auxsum = 0
            for i in range(m):
                auxsum += (y_pred[0][i] - y[0][i]) * X[j][i]
            # Note: this stores theta_j already updated by one gradient step
            # (theta_j - alpha * dJ/dtheta_j), not the bare derivative, which is
            # why fit() assigns the returned array directly to self.theta.
            derivatives[j][0] = self.theta[j][0] - self.alpha * 1/m * auxsum
        #empty_derivatives = np.zeros((X.shape[0],1))
        return derivatives
def fit(self, X, y):
"""
Fits the linear regressor to the values in the dataset
X: is an (n x m) vector, where n is the number of features and m is the number of samples/examples
y: is an (1 x m) vector, where m is the number of samples/examples
TODO: You need to provide an implementation that in each epoch is updating the values for the theta parameters by using the hypothesis and cost function functions
"""
n, m = X.shape[0], X.shape[1]
# theta is (nx1) (one theta per dimension)
self.theta = np.random.uniform(-10, 10, (n, 1))
for i in range(self.epochs):
# Get predictions
y_pred = self.predict(X)
# calculate cost
# cost = ...
cost = self._cost_function(y_pred, y, m)
            # gradient is an (n x 1) array; it holds the per-theta values after the update step
gradient = self._cost_function_derivative(y_pred, y, X, m)
# delta/update rule
self.theta = gradient
self.costs.append(cost)
pass
print("Final theta is {} (cost: {})".format(self.theta.T, cost))
def predict(self, X):
"""
Predicts the values for the given X samples using the current configuration of the Linear Regressor.
X: an (n x m') array with m' samples of n dimensions whose value must be predicted.
TODO: You must return a (1 x m') array that includes the predictions for the given m' samples.
"""
# ! You could simply call the hypothesis here
        predictions = self._hypothesis(X)
#empty_predictions = np.zeros((1,X.shape[1]))
return predictions
|
normal
|
{
"blob_id": "d805a1290c107a8d768417a432e338b182b7cd6b",
"index": 5524,
"step-1": "<mask token>\n\n\nclass LinearRegressor:\n <mask token>\n\n def _cost_function(self, y_pred, y, m):\n \"\"\"\n Gets the cost for the predicted values when contrasted with the correct ones.\n y_pred: An (1 x m) vector that corresponds to the values predicted by the Linear Regressor\n y: An (1 x m) vector that corresponds to the y (right) values in the dataset\n m: the number of samples (it could be also inferred from the shape of y or y_pred)\n\n TODO: You must implement the cost function and return an scalar that corresponds to the error produced by the Linear Regressor with its current configuration\n \"\"\"\n sumatory = 0\n for x in range(m):\n sumatory += (y_pred[0][x] - y[0][x]) ** 2\n cost = 1 / (2 * m) * sumatory\n return cost\n <mask token>\n\n def _cost_function_derivative(self, y_pred, y, X, m):\n \"\"\"\n Calculates the derivatives (gradient) of the cost function through the obtained/predicted values.\n y_pred: an (1 x m) array with the predicted values for X dataset\n y: an (1 x m) array with the right values for X dataset\n X: the input dataset\n m: the number of samples in the dataset\n\n TODO: You must implement the calculation of derivatives. An (n x 1) array that corresponds to the gradient of current theta values (the derivative per theta parameter) must be returned.\n \"\"\"\n derivatives = np.zeros((X.shape[0], 1))\n for j in range(X.shape[0]):\n auxsum = 0\n for i in range(m):\n auxsum += (y_pred[0][i] - y[0][i]) * X[j][i]\n derivatives[j][0] = self.theta[j][0] - self.alpha * 1 / m * auxsum\n return derivatives\n\n def fit(self, X, y):\n \"\"\"\n Fits the linear regressor to the values in the dataset\n X: is an (n x m) vector, where n is the number of features and m is the number of samples/examples\n y: is an (1 x m) vector, where m is the number of samples/examples\n\n TODO: You need to provide an implementation that in each epoch is updating the values for the theta parameters by using the hypothesis and cost function functions\n \"\"\"\n n, m = X.shape[0], X.shape[1]\n self.theta = np.random.uniform(-10, 10, (n, 1))\n for i in range(self.epochs):\n y_pred = self.predict(X)\n cost = self._cost_function(y_pred, y, m)\n gradient = self._cost_function_derivative(y_pred, y, X, m)\n self.theta = gradient\n self.costs.append(cost)\n pass\n print('Final theta is {} (cost: {})'.format(self.theta.T, cost))\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass LinearRegressor:\n <mask token>\n\n def _cost_function(self, y_pred, y, m):\n \"\"\"\n Gets the cost for the predicted values when contrasted with the correct ones.\n y_pred: An (1 x m) vector that corresponds to the values predicted by the Linear Regressor\n y: An (1 x m) vector that corresponds to the y (right) values in the dataset\n m: the number of samples (it could be also inferred from the shape of y or y_pred)\n\n TODO: You must implement the cost function and return an scalar that corresponds to the error produced by the Linear Regressor with its current configuration\n \"\"\"\n sumatory = 0\n for x in range(m):\n sumatory += (y_pred[0][x] - y[0][x]) ** 2\n cost = 1 / (2 * m) * sumatory\n return cost\n\n def _hypothesis(self, X):\n \"\"\"\n Calculates the hypothesis for the given examples using the current self.theta values.\n X: an m x n array of m samples/examples with n features each.\n Creo que X es en realidad nxm\n transpose de theta es 1xn y * nxm = 1xm\n\n TODO: you must return a (1 x m) array, which corresponds to the estimated value for each of the m samples\n \"\"\"\n result = np.transpose(self.theta) @ X\n return result\n\n def _cost_function_derivative(self, y_pred, y, X, m):\n \"\"\"\n Calculates the derivatives (gradient) of the cost function through the obtained/predicted values.\n y_pred: an (1 x m) array with the predicted values for X dataset\n y: an (1 x m) array with the right values for X dataset\n X: the input dataset\n m: the number of samples in the dataset\n\n TODO: You must implement the calculation of derivatives. An (n x 1) array that corresponds to the gradient of current theta values (the derivative per theta parameter) must be returned.\n \"\"\"\n derivatives = np.zeros((X.shape[0], 1))\n for j in range(X.shape[0]):\n auxsum = 0\n for i in range(m):\n auxsum += (y_pred[0][i] - y[0][i]) * X[j][i]\n derivatives[j][0] = self.theta[j][0] - self.alpha * 1 / m * auxsum\n return derivatives\n\n def fit(self, X, y):\n \"\"\"\n Fits the linear regressor to the values in the dataset\n X: is an (n x m) vector, where n is the number of features and m is the number of samples/examples\n y: is an (1 x m) vector, where m is the number of samples/examples\n\n TODO: You need to provide an implementation that in each epoch is updating the values for the theta parameters by using the hypothesis and cost function functions\n \"\"\"\n n, m = X.shape[0], X.shape[1]\n self.theta = np.random.uniform(-10, 10, (n, 1))\n for i in range(self.epochs):\n y_pred = self.predict(X)\n cost = self._cost_function(y_pred, y, m)\n gradient = self._cost_function_derivative(y_pred, y, X, m)\n self.theta = gradient\n self.costs.append(cost)\n pass\n print('Final theta is {} (cost: {})'.format(self.theta.T, cost))\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass LinearRegressor:\n\n def __init__(self, alpha=0.1, epochs=1):\n self.alpha = alpha\n self.epochs = epochs\n self.costs = []\n self.theta = None\n\n def _cost_function(self, y_pred, y, m):\n \"\"\"\n Gets the cost for the predicted values when contrasted with the correct ones.\n y_pred: An (1 x m) vector that corresponds to the values predicted by the Linear Regressor\n y: An (1 x m) vector that corresponds to the y (right) values in the dataset\n m: the number of samples (it could be also inferred from the shape of y or y_pred)\n\n TODO: You must implement the cost function and return an scalar that corresponds to the error produced by the Linear Regressor with its current configuration\n \"\"\"\n sumatory = 0\n for x in range(m):\n sumatory += (y_pred[0][x] - y[0][x]) ** 2\n cost = 1 / (2 * m) * sumatory\n return cost\n\n def _hypothesis(self, X):\n \"\"\"\n Calculates the hypothesis for the given examples using the current self.theta values.\n X: an m x n array of m samples/examples with n features each.\n Creo que X es en realidad nxm\n transpose de theta es 1xn y * nxm = 1xm\n\n TODO: you must return a (1 x m) array, which corresponds to the estimated value for each of the m samples\n \"\"\"\n result = np.transpose(self.theta) @ X\n return result\n\n def _cost_function_derivative(self, y_pred, y, X, m):\n \"\"\"\n Calculates the derivatives (gradient) of the cost function through the obtained/predicted values.\n y_pred: an (1 x m) array with the predicted values for X dataset\n y: an (1 x m) array with the right values for X dataset\n X: the input dataset\n m: the number of samples in the dataset\n\n TODO: You must implement the calculation of derivatives. An (n x 1) array that corresponds to the gradient of current theta values (the derivative per theta parameter) must be returned.\n \"\"\"\n derivatives = np.zeros((X.shape[0], 1))\n for j in range(X.shape[0]):\n auxsum = 0\n for i in range(m):\n auxsum += (y_pred[0][i] - y[0][i]) * X[j][i]\n derivatives[j][0] = self.theta[j][0] - self.alpha * 1 / m * auxsum\n return derivatives\n\n def fit(self, X, y):\n \"\"\"\n Fits the linear regressor to the values in the dataset\n X: is an (n x m) vector, where n is the number of features and m is the number of samples/examples\n y: is an (1 x m) vector, where m is the number of samples/examples\n\n TODO: You need to provide an implementation that in each epoch is updating the values for the theta parameters by using the hypothesis and cost function functions\n \"\"\"\n n, m = X.shape[0], X.shape[1]\n self.theta = np.random.uniform(-10, 10, (n, 1))\n for i in range(self.epochs):\n y_pred = self.predict(X)\n cost = self._cost_function(y_pred, y, m)\n gradient = self._cost_function_derivative(y_pred, y, X, m)\n self.theta = gradient\n self.costs.append(cost)\n pass\n print('Final theta is {} (cost: {})'.format(self.theta.T, cost))\n <mask token>\n",
"step-4": "import numpy as np\n\n\nclass LinearRegressor:\n\n def __init__(self, alpha=0.1, epochs=1):\n self.alpha = alpha\n self.epochs = epochs\n self.costs = []\n self.theta = None\n\n def _cost_function(self, y_pred, y, m):\n \"\"\"\n Gets the cost for the predicted values when contrasted with the correct ones.\n y_pred: An (1 x m) vector that corresponds to the values predicted by the Linear Regressor\n y: An (1 x m) vector that corresponds to the y (right) values in the dataset\n m: the number of samples (it could be also inferred from the shape of y or y_pred)\n\n TODO: You must implement the cost function and return an scalar that corresponds to the error produced by the Linear Regressor with its current configuration\n \"\"\"\n sumatory = 0\n for x in range(m):\n sumatory += (y_pred[0][x] - y[0][x]) ** 2\n cost = 1 / (2 * m) * sumatory\n return cost\n\n def _hypothesis(self, X):\n \"\"\"\n Calculates the hypothesis for the given examples using the current self.theta values.\n X: an m x n array of m samples/examples with n features each.\n Creo que X es en realidad nxm\n transpose de theta es 1xn y * nxm = 1xm\n\n TODO: you must return a (1 x m) array, which corresponds to the estimated value for each of the m samples\n \"\"\"\n result = np.transpose(self.theta) @ X\n return result\n\n def _cost_function_derivative(self, y_pred, y, X, m):\n \"\"\"\n Calculates the derivatives (gradient) of the cost function through the obtained/predicted values.\n y_pred: an (1 x m) array with the predicted values for X dataset\n y: an (1 x m) array with the right values for X dataset\n X: the input dataset\n m: the number of samples in the dataset\n\n TODO: You must implement the calculation of derivatives. An (n x 1) array that corresponds to the gradient of current theta values (the derivative per theta parameter) must be returned.\n \"\"\"\n derivatives = np.zeros((X.shape[0], 1))\n for j in range(X.shape[0]):\n auxsum = 0\n for i in range(m):\n auxsum += (y_pred[0][i] - y[0][i]) * X[j][i]\n derivatives[j][0] = self.theta[j][0] - self.alpha * 1 / m * auxsum\n return derivatives\n\n def fit(self, X, y):\n \"\"\"\n Fits the linear regressor to the values in the dataset\n X: is an (n x m) vector, where n is the number of features and m is the number of samples/examples\n y: is an (1 x m) vector, where m is the number of samples/examples\n\n TODO: You need to provide an implementation that in each epoch is updating the values for the theta parameters by using the hypothesis and cost function functions\n \"\"\"\n n, m = X.shape[0], X.shape[1]\n self.theta = np.random.uniform(-10, 10, (n, 1))\n for i in range(self.epochs):\n y_pred = self.predict(X)\n cost = self._cost_function(y_pred, y, m)\n gradient = self._cost_function_derivative(y_pred, y, X, m)\n self.theta = gradient\n self.costs.append(cost)\n pass\n print('Final theta is {} (cost: {})'.format(self.theta.T, cost))\n\n def predict(self, X):\n \"\"\"\n Predicts the values for the given X samples using the current configuration of the Linear Regressor.\n\n X: an (n x m') array with m' samples of n dimensions whose value must be predicted.\n\n TODO: You must return a (1 x m') array that includes the predictions for the given m' samples.\n \"\"\"\n predictions = self._hypothesis(X)\n return predictions\n",
"step-5": "import numpy as np\n\n\nclass LinearRegressor():\n def __init__(self, alpha=0.1, epochs=1):\n self.alpha = alpha\n self.epochs = epochs\n self.costs = []\n self.theta = None\n\n def _cost_function(self, y_pred, y, m):\n \"\"\"\n Gets the cost for the predicted values when contrasted with the correct ones.\n y_pred: An (1 x m) vector that corresponds to the values predicted by the Linear Regressor\n y: An (1 x m) vector that corresponds to the y (right) values in the dataset\n m: the number of samples (it could be also inferred from the shape of y or y_pred)\n\n TODO: You must implement the cost function and return an scalar that corresponds to the error produced by the Linear Regressor with its current configuration\n \"\"\"\n sumatory = 0\n for x in range(m):\n sumatory += (y_pred[0][x] -y[0][x])**2\n\n cost = 1/(2*m) * sumatory\n return cost\n\n\n def _hypothesis(self, X):\n \"\"\"\n Calculates the hypothesis for the given examples using the current self.theta values.\n X: an m x n array of m samples/examples with n features each.\n Creo que X es en realidad nxm\n transpose de theta es 1xn y * nxm = 1xm\n\n TODO: you must return a (1 x m) array, which corresponds to the estimated value for each of the m samples\n \"\"\"\n # * is element wise multiplication\n # numpy.dot(), or @ operator will work\n result = np.transpose(self.theta)@ X \n #emptyResult = np.zeros((1,X.shape[1]))\n return result \n\n def _cost_function_derivative(self, y_pred, y, X, m):\n \"\"\"\n Calculates the derivatives (gradient) of the cost function through the obtained/predicted values.\n y_pred: an (1 x m) array with the predicted values for X dataset\n y: an (1 x m) array with the right values for X dataset\n X: the input dataset\n m: the number of samples in the dataset\n\n TODO: You must implement the calculation of derivatives. 
An (n x 1) array that corresponds to the gradient of current theta values (the derivative per theta parameter) must be returned.\n \"\"\"\n\n derivatives= np.zeros((X.shape[0],1))\n for j in range(X.shape[0]):\n auxsum = 0\n for i in range(m):\n auxsum+=(y_pred[0][i] -y[0][i])*X[j][i]\n derivatives[j][0] = self.theta[j][0] - self.alpha * 1/m * auxsum\n\n #empty_derivatives = np.zeros((X.shape[0],1))\n return derivatives\n\n def fit(self, X, y):\n \"\"\"\n Fits the linear regressor to the values in the dataset\n X: is an (n x m) vector, where n is the number of features and m is the number of samples/examples\n y: is an (1 x m) vector, where m is the number of samples/examples\n\n TODO: You need to provide an implementation that in each epoch is updating the values for the theta parameters by using the hypothesis and cost function functions\n \"\"\"\n\n n, m = X.shape[0], X.shape[1]\n\n # theta is (nx1) (one theta per dimension)\n self.theta = np.random.uniform(-10, 10, (n, 1))\n\n for i in range(self.epochs):\n # Get predictions\n y_pred = self.predict(X)\n\n # calculate cost\n # cost = ...\n cost = self._cost_function(y_pred, y, m)\n \n\n # gradient is an (n) x 1 array, it refers to the derivate per theta\n gradient = self._cost_function_derivative(y_pred, y, X, m)\n\n # delta/update rule\n self.theta = gradient\n\n self.costs.append(cost)\n pass\n\n print(\"Final theta is {} (cost: {})\".format(self.theta.T, cost))\n\n def predict(self, X):\n \"\"\"\n Predicts the values for the given X samples using the current configuration of the Linear Regressor.\n\n X: an (n x m') array with m' samples of n dimensions whose value must be predicted.\n\n TODO: You must return a (1 x m') array that includes the predictions for the given m' samples.\n \"\"\"\n # ! You could simply call the hypothesis here\n predictions= self._hypothesis(X)\n #empty_predictions = np.zeros((1,X.shape[1]))\n return predictions",
"step-ids": [
4,
5,
6,
8,
9
]
}
|
[
4,
5,
6,
8,
9
] |
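A quick smoke test for the LinearRegressor above, assuming the (n x m) layout its docstrings describe: features are rows (with a bias row of ones) and samples are columns.

import numpy as np

rng = np.random.default_rng(0)
m = 100
x = rng.uniform(-1, 1, (1, m))
X = np.vstack([np.ones((1, m)), x])  # (2 x m): bias row plus one feature row
y = 3.0 + 2.0 * x                    # ground truth theta ~ [3, 2]; y is (1 x m)

reg = LinearRegressor(alpha=0.5, epochs=200)  # class from the record above
reg.fit(X, y)                                 # prints the final theta and cost
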
# file /home/hep/ss4314/cmtuser/Gauss_v45r10p1/Gen/DecFiles/options/16303437.py generated: Wed, 25 Jan 2017 15:25:22
#
# Event Type: 16303437
#
# ASCII decay Descriptor: [Xi_b- -> (rho- -> pi- pi0) K- p+]cc
#
from Configurables import Generation
Generation().EventType = 16303437
Generation().SampleGenerationTool = "SignalRepeatedHadronization"
from Configurables import SignalRepeatedHadronization
Generation().addTool( SignalRepeatedHadronization )
Generation().SignalRepeatedHadronization.ProductionTool = "PythiaProduction"
from Configurables import ToolSvc
from Configurables import EvtGenDecay
ToolSvc().addTool( EvtGenDecay )
ToolSvc().EvtGenDecay.UserDecayFile = "$DECFILESROOT/dkfiles/Omegab_rho-h-p+,pi-pi0_PPChange=phsp,DecProdCut.dec"
Generation().SignalRepeatedHadronization.CutTool = "DaughtersInLHCb"
Generation().SignalRepeatedHadronization.SignalPIDList = [ 5132,-5132 ]
from Configurables import LHCb__ParticlePropertySvc
LHCb__ParticlePropertySvc().Particles = [ "Xi_b- 122 5132 -1.0 6.048 1.13e-012 Xi_b- 5132 0.000000e+000", "Xi_b~+ 123 -5132 1.0 6.071 1.1e-012 anti-Xi_b+ -5132 0.000000e+000" ]
|
normal
|
{
"blob_id": "7cc9d445d712d485eaebd090d2485dac0c38b3fb",
"index": 5918,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nGeneration().addTool(SignalRepeatedHadronization)\n<mask token>\nToolSvc().addTool(EvtGenDecay)\n<mask token>\n",
"step-3": "<mask token>\nGeneration().EventType = 16303437\nGeneration().SampleGenerationTool = 'SignalRepeatedHadronization'\n<mask token>\nGeneration().addTool(SignalRepeatedHadronization)\nGeneration().SignalRepeatedHadronization.ProductionTool = 'PythiaProduction'\n<mask token>\nToolSvc().addTool(EvtGenDecay)\nToolSvc().EvtGenDecay.UserDecayFile = (\n '$DECFILESROOT/dkfiles/Omegab_rho-h-p+,pi-pi0_PPChange=phsp,DecProdCut.dec'\n )\nGeneration().SignalRepeatedHadronization.CutTool = 'DaughtersInLHCb'\nGeneration().SignalRepeatedHadronization.SignalPIDList = [5132, -5132]\n<mask token>\nLHCb__ParticlePropertySvc().Particles = [\n 'Xi_b- 122 5132 -1.0 6.048 1.13e-012 Xi_b- 5132 0.000000e+000',\n 'Xi_b~+ 123 -5132 1.0 6.071 1.1e-012 anti-Xi_b+ -5132 0.000000e+000']\n",
"step-4": "from Configurables import Generation\nGeneration().EventType = 16303437\nGeneration().SampleGenerationTool = 'SignalRepeatedHadronization'\nfrom Configurables import SignalRepeatedHadronization\nGeneration().addTool(SignalRepeatedHadronization)\nGeneration().SignalRepeatedHadronization.ProductionTool = 'PythiaProduction'\nfrom Configurables import ToolSvc\nfrom Configurables import EvtGenDecay\nToolSvc().addTool(EvtGenDecay)\nToolSvc().EvtGenDecay.UserDecayFile = (\n '$DECFILESROOT/dkfiles/Omegab_rho-h-p+,pi-pi0_PPChange=phsp,DecProdCut.dec'\n )\nGeneration().SignalRepeatedHadronization.CutTool = 'DaughtersInLHCb'\nGeneration().SignalRepeatedHadronization.SignalPIDList = [5132, -5132]\nfrom Configurables import LHCb__ParticlePropertySvc\nLHCb__ParticlePropertySvc().Particles = [\n 'Xi_b- 122 5132 -1.0 6.048 1.13e-012 Xi_b- 5132 0.000000e+000',\n 'Xi_b~+ 123 -5132 1.0 6.071 1.1e-012 anti-Xi_b+ -5132 0.000000e+000']\n",
"step-5": "# file /home/hep/ss4314/cmtuser/Gauss_v45r10p1/Gen/DecFiles/options/16303437.py generated: Wed, 25 Jan 2017 15:25:22\n#\n# Event Type: 16303437\n#\n# ASCII decay Descriptor: [Xi_b- -> (rho- -> pi- pi0) K- p+]cc\n#\nfrom Configurables import Generation\nGeneration().EventType = 16303437\nGeneration().SampleGenerationTool = \"SignalRepeatedHadronization\"\nfrom Configurables import SignalRepeatedHadronization\nGeneration().addTool( SignalRepeatedHadronization )\nGeneration().SignalRepeatedHadronization.ProductionTool = \"PythiaProduction\"\nfrom Configurables import ToolSvc\nfrom Configurables import EvtGenDecay\nToolSvc().addTool( EvtGenDecay )\nToolSvc().EvtGenDecay.UserDecayFile = \"$DECFILESROOT/dkfiles/Omegab_rho-h-p+,pi-pi0_PPChange=phsp,DecProdCut.dec\"\nGeneration().SignalRepeatedHadronization.CutTool = \"DaughtersInLHCb\"\nGeneration().SignalRepeatedHadronization.SignalPIDList = [ 5132,-5132 ]\nfrom Configurables import LHCb__ParticlePropertySvc\nLHCb__ParticlePropertySvc().Particles = [ \"Xi_b- 122 5132 -1.0 6.048 1.13e-012 Xi_b- 5132 0.000000e+000\", \"Xi_b~+ 123 -5132 1.0 6.071 1.1e-012 anti-Xi_b+ -5132 0.000000e+000\" ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def word_count(s):
cache = {}
ignore = '":;,.-+=/\\|[]{}()*^&'
lower = s.lower()
for i in lower:
if i in ignore:
lower = lower.replace(i, '')
words = lower.split()
for j in words:
if j not in cache:
cache[j] = 1
else:
cache[j] += 1
return cache
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def word_count(s):
cache = {}
ignore = '":;,.-+=/\\|[]{}()*^&'
lower = s.lower()
for i in lower:
if i in ignore:
lower = lower.replace(i, '')
words = lower.split()
for j in words:
if j not in cache:
cache[j] = 1
else:
cache[j] += 1
return cache
if __name__ == '__main__':
print(word_count(''))
print(word_count('Hello'))
print(word_count('Hello, my cat. And my cat doesn\'t say "hello" back.'))
print(word_count(
'This is a test of the emergency broadcast network. This is only a test.'
))
<|reserved_special_token_1|>
def word_count(s):
# Your code here
cache = {}
ignore = '":;,.-+=/\\|[]{}()*^&'
lower = s.lower()
for i in lower:
if i in ignore:
lower = lower.replace(i, '')
words = lower.split()
for j in words:
if j not in cache:
cache[j] = 1
else:
cache[j] += 1
return cache
if __name__ == "__main__":
print(word_count(""))
print(word_count("Hello"))
print(word_count('Hello, my cat. And my cat doesn\'t say "hello" back.'))
print(word_count(
'This is a test of the emergency broadcast network. This is only a test.'))
|
flexible
|
{
"blob_id": "97d84f99264afa5e7df4b5d22cf4c49b2d14ff7a",
"index": 8291,
"step-1": "<mask token>\n",
"step-2": "def word_count(s):\n cache = {}\n ignore = '\":;,.-+=/\\\\|[]{}()*^&'\n lower = s.lower()\n for i in lower:\n if i in ignore:\n lower = lower.replace(i, '')\n words = lower.split()\n for j in words:\n if j not in cache:\n cache[j] = 1\n else:\n cache[j] += 1\n return cache\n\n\n<mask token>\n",
"step-3": "def word_count(s):\n cache = {}\n ignore = '\":;,.-+=/\\\\|[]{}()*^&'\n lower = s.lower()\n for i in lower:\n if i in ignore:\n lower = lower.replace(i, '')\n words = lower.split()\n for j in words:\n if j not in cache:\n cache[j] = 1\n else:\n cache[j] += 1\n return cache\n\n\nif __name__ == '__main__':\n print(word_count(''))\n print(word_count('Hello'))\n print(word_count('Hello, my cat. And my cat doesn\\'t say \"hello\" back.'))\n print(word_count(\n 'This is a test of the emergency broadcast network. This is only a test.'\n ))\n",
"step-4": "def word_count(s):\n # Your code here\n cache = {}\n ignore = '\":;,.-+=/\\\\|[]{}()*^&'\n lower = s.lower()\n\n for i in lower:\n if i in ignore:\n lower = lower.replace(i, '')\n words = lower.split()\n for j in words:\n if j not in cache:\n cache[j] = 1\n else:\n cache[j] += 1\n return cache\n\n\nif __name__ == \"__main__\":\n print(word_count(\"\"))\n print(word_count(\"Hello\"))\n print(word_count('Hello, my cat. And my cat doesn\\'t say \"hello\" back.'))\n print(word_count(\n 'This is a test of the emergency broadcast network. This is only a test.'))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
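For contrast with the word_count above, a more idiomatic sketch of the same counting logic using str.translate and collections.Counter; behavior matches the final step for these inputs.

from collections import Counter


def word_count_counter(s):
    ignore = '":;,.-+=/\\|[]{}()*^&'
    # strip the ignored punctuation, then count lowercase whitespace-separated tokens
    cleaned = s.lower().translate(str.maketrans('', '', ignore))
    return dict(Counter(cleaned.split()))


print(word_count_counter('Hello, my cat. And my cat doesn\'t say "hello" back.'))
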
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(a)
print(b)
<|reserved_special_token_0|>
print(a)
print(b)
<|reserved_special_token_0|>
print(USER_NAME)
print(USER_NAME)
<|reserved_special_token_1|>
a = 1
b = a
print(a)
print(b)
a = 2
print(a)
print(b)
USER_NAME = '常量'
print(USER_NAME)
print(USER_NAME)
<|reserved_special_token_1|>
a = 1
b = a
print(a)
print(b)
a = 2
print(a)
print(b)
# 全部大写字符代表常量
USER_NAME = "常量"
print(USER_NAME)
print(USER_NAME)
|
flexible
|
{
"blob_id": "1cc9a7bbe1bda06ce76fa8ec1cdc17c7b2fde73b",
"index": 4051,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(a)\nprint(b)\n<mask token>\nprint(a)\nprint(b)\n<mask token>\nprint(USER_NAME)\nprint(USER_NAME)\n",
"step-3": "a = 1\nb = a\nprint(a)\nprint(b)\na = 2\nprint(a)\nprint(b)\nUSER_NAME = '常量'\nprint(USER_NAME)\nprint(USER_NAME)\n",
"step-4": "\na = 1\nb = a\nprint(a)\nprint(b)\n\na = 2\nprint(a)\nprint(b)\n\n# 全部大写字符代表常量\n\nUSER_NAME = \"常量\"\nprint(USER_NAME)\n\nprint(USER_NAME)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
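The snippet above shows that rebinding an int name leaves the other name untouched; with a mutable object, in-place changes are visible through every alias, as this small contrast sketch illustrates.

a = [1]
b = a        # b aliases the same list object
a.append(2)  # in-place mutation is visible through both names
print(b)     # [1, 2]
a = [3]      # rebinding a points it at a new object
print(b)     # still [1, 2]
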
<|reserved_special_token_0|>
def colorPrint(color, str):
print(color + str + '\x1b[0m')
def main():
if sys.argv.__len__() < 2:
print('Wrong usage, exit')
return
colorPrint(YELLOW, sys.argv[1])
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def colorPrint(color, str):
print(color + str + '\x1b[0m')
def main():
if sys.argv.__len__() < 2:
print('Wrong usage, exit')
return
colorPrint(YELLOW, sys.argv[1])
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
BLACK = '\x1b[30;0m'
RED = '\x1b[31;0m'
GREEN = '\x1b[32;0m'
YELLOW = '\x1b[33;0m'
BLUE = '\x1b[34;0m'
PINK = '\x1b[35;0m'
CBLUE = '\x1b[36;0m'
WHITE = '\x1b[37;0m'
def colorPrint(color, str):
print(color + str + '\x1b[0m')
def main():
if sys.argv.__len__() < 2:
print('Wrong usage, exit')
return
colorPrint(YELLOW, sys.argv[1])
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
import sys
BLACK = '\x1b[30;0m'
RED = '\x1b[31;0m'
GREEN = '\x1b[32;0m'
YELLOW = '\x1b[33;0m'
BLUE = '\x1b[34;0m'
PINK = '\x1b[35;0m'
CBLUE = '\x1b[36;0m'
WHITE = '\x1b[37;0m'
def colorPrint(color, str):
print(color + str + '\x1b[0m')
def main():
if sys.argv.__len__() < 2:
print('Wrong usage, exit')
return
colorPrint(YELLOW, sys.argv[1])
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
#!/usr/bin/python
import sys
BLACK = '\033[30;0m'
RED = '\033[31;0m'
GREEN = '\033[32;0m'
YELLOW = '\033[33;0m'
BLUE = '\033[34;0m'
PINK = '\033[35;0m'
CBLUE = '\033[36;0m'
WHITE = '\033[37;0m'
def colorPrint(color, str):
print(color + str + '\033[0m');
def main():
if sys.argv.__len__() < 2:
print('Wrong usage, exit')
return
colorPrint(YELLOW, sys.argv[1])
if __name__ == '__main__':
main()
|
flexible
|
{
"blob_id": "a49c00dab8d445ce0b08fd31a4a41d6c8976d662",
"index": 2263,
"step-1": "<mask token>\n\n\ndef colorPrint(color, str):\n print(color + str + '\\x1b[0m')\n\n\ndef main():\n if sys.argv.__len__() < 2:\n print('Wrong usage, exit')\n return\n colorPrint(YELLOW, sys.argv[1])\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef colorPrint(color, str):\n print(color + str + '\\x1b[0m')\n\n\ndef main():\n if sys.argv.__len__() < 2:\n print('Wrong usage, exit')\n return\n colorPrint(YELLOW, sys.argv[1])\n\n\nif __name__ == '__main__':\n main()\n",
"step-3": "<mask token>\nBLACK = '\\x1b[30;0m'\nRED = '\\x1b[31;0m'\nGREEN = '\\x1b[32;0m'\nYELLOW = '\\x1b[33;0m'\nBLUE = '\\x1b[34;0m'\nPINK = '\\x1b[35;0m'\nCBLUE = '\\x1b[36;0m'\nWHITE = '\\x1b[37;0m'\n\n\ndef colorPrint(color, str):\n print(color + str + '\\x1b[0m')\n\n\ndef main():\n if sys.argv.__len__() < 2:\n print('Wrong usage, exit')\n return\n colorPrint(YELLOW, sys.argv[1])\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import sys\nBLACK = '\\x1b[30;0m'\nRED = '\\x1b[31;0m'\nGREEN = '\\x1b[32;0m'\nYELLOW = '\\x1b[33;0m'\nBLUE = '\\x1b[34;0m'\nPINK = '\\x1b[35;0m'\nCBLUE = '\\x1b[36;0m'\nWHITE = '\\x1b[37;0m'\n\n\ndef colorPrint(color, str):\n print(color + str + '\\x1b[0m')\n\n\ndef main():\n if sys.argv.__len__() < 2:\n print('Wrong usage, exit')\n return\n colorPrint(YELLOW, sys.argv[1])\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "#!/usr/bin/python\nimport sys\n\nBLACK = '\\033[30;0m'\nRED = '\\033[31;0m'\nGREEN = '\\033[32;0m'\nYELLOW = '\\033[33;0m'\nBLUE = '\\033[34;0m'\nPINK = '\\033[35;0m'\nCBLUE = '\\033[36;0m'\nWHITE = '\\033[37;0m'\n\ndef colorPrint(color, str):\n print(color + str + '\\033[0m');\n\ndef main():\n if sys.argv.__len__() < 2:\n print('Wrong usage, exit')\n return\n colorPrint(YELLOW, sys.argv[1])\n\nif __name__ == '__main__':\n main()\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
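One caution on the color constants above: SGR parameters apply left to right, so '\033[31;0m' sets red and then the trailing 0 resets all attributes, leaving the text uncolored. A corrected sketch:

RED = '\033[31m'     # or '\033[0;31m' to reset attributes before setting red
YELLOW = '\033[33m'


def color_print(color, text):
    print(color + text + '\033[0m')  # trailing reset restores the default style


color_print(YELLOW, 'this renders in yellow')
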
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def check_exsit(process_name):
WMI = win32com.client.GetObject('winmgmts:')
processCodeCov = WMI.ExecQuery(
'select * from Win32_Process where Name="%s"' % process_name)
if len(processCodeCov) > 0:
return bool(True)
else:
return bool(False)
if __name__ == '__main__':
process = 'OUTLOOK.EXE'
if check_exsit(process):
os.system('taskkill /F /IM OUTLOOK.EXE')
os.startfile(
'C:\\Program Files (x86)\\Microsoft Office\\Office15\\OUTLOOK.EXE')
else:
os.startfile(
'C:\\Program Files (x86)\\Microsoft Office\\Office15\\OUTLOOK.EXE')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
__author__ = 'liwenchang'
<|reserved_special_token_0|>
def check_exsit(process_name):
WMI = win32com.client.GetObject('winmgmts:')
processCodeCov = WMI.ExecQuery(
'select * from Win32_Process where Name="%s"' % process_name)
if len(processCodeCov) > 0:
return bool(True)
else:
return bool(False)
if __name__ == '__main__':
process = 'OUTLOOK.EXE'
if check_exsit(process):
os.system('taskkill /F /IM OUTLOOK.EXE')
os.startfile(
'C:\\Program Files (x86)\\Microsoft Office\\Office15\\OUTLOOK.EXE')
else:
os.startfile(
'C:\\Program Files (x86)\\Microsoft Office\\Office15\\OUTLOOK.EXE')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
__author__ = 'liwenchang'
import os
import time
import win32api, win32pdhutil, win32con, win32com.client
import win32pdh, string
def check_exsit(process_name):
WMI = win32com.client.GetObject('winmgmts:')
processCodeCov = WMI.ExecQuery(
'select * from Win32_Process where Name="%s"' % process_name)
if len(processCodeCov) > 0:
return bool(True)
else:
return bool(False)
if __name__ == '__main__':
process = 'OUTLOOK.EXE'
if check_exsit(process):
os.system('taskkill /F /IM OUTLOOK.EXE')
os.startfile(
'C:\\Program Files (x86)\\Microsoft Office\\Office15\\OUTLOOK.EXE')
else:
os.startfile(
'C:\\Program Files (x86)\\Microsoft Office\\Office15\\OUTLOOK.EXE')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
__author__ = 'liwenchang'
#-*- coding:utf-8 -*-
import os
import time
import win32api, win32pdhutil, win32con, win32com.client
import win32pdh, string
def check_exsit(process_name):
WMI = win32com.client.GetObject('winmgmts:')
processCodeCov = WMI.ExecQuery('select * from Win32_Process where Name="%s"' % process_name)
if len(processCodeCov) > 0:
#print '%s is exists' % process_name
return bool(True)
else:
#print '%s is not exists' % process_name
return bool(False)
if __name__ == '__main__':
process='OUTLOOK.EXE'
if check_exsit(process):
os.system('taskkill /F /IM OUTLOOK.EXE')
os.startfile("C:\Program Files (x86)\Microsoft Office\Office15\OUTLOOK.EXE")
else:
os.startfile("C:\Program Files (x86)\Microsoft Office\Office15\OUTLOOK.EXE")
#os.system('taskkill /F /IM OUTLOOK.EXE')
#os.startfile("C:\Program Files (x86)\Microsoft Office\Office15\OUTLOOK.EXE")
'''
# ***********************************************************************
# ***********************************************************************
def GetProcessID( name ):
object = "Process"
items, instances = win32pdh.EnumObjectItems(None,None,object, win32pdh.PERF_DETAIL_WIZARD)
val = None
if name in instances :
hq = win32pdh.OpenQuery()
hcs = []
item = "ID Process"
path = win32pdh.MakeCounterPath( (None,object,name, None, 0, item) )
hcs.append(win32pdh.AddCounter(hq, path))
win32pdh.CollectQueryData(hq)
time.sleep(0.01)
win32pdh.CollectQueryData(hq)
for hc in hcs:
type, val = win32pdh.GetFormattedCounterValue(hc, win32pdh.PDH_FMT_LONG)
win32pdh.RemoveCounter(hc)
win32pdh.CloseQuery(hq)
return val
# ***********************************************************************
# ***********************************************************************
# ***********************************************************************
def Kill_Process ( name ) :
pid = GetProcessID (name)
print pid
if pid:
print "exist"
Kill_Process_pid(pid)
else:
print "not this proccess"
# ***********************************************************************
'''
'''
#THIS IS SLOW !!
def Kill_Process ( process ) :
#get process id's for the given process name
pids = win32pdhutil.FindPerformanceAttributesByName ( process )
for p in pids:
handle = win32api.OpenProcess(win32con.PROCESS_TERMINATE, 0, p) #get process handle
win32api.TerminateProcess(handle,0) #kill by handle
win32api.CloseHandle(handle) #close api
'''
# ***********************************************************************
# ***********************************************************************
'''
def Kill_Process ( process_name ) :
#get process id's for the given process name
pids = win32pdhutil.FindPerformanceAttributesByName ( 'OUTLOOK.EXE' )
print pids
for p in pids:
handle = win32api.OpenProcess(win32con.PROCESS_TERMINATE, 0, p) #get process handle
# win32api.TerminateProcess(handle,0) #kill by handle
# win32api.CloseHandle(handle) #close api
'''
'''
import os
command = 'taskkill /F /IM QQ.exe'
os.system(command)
'''
'''
# ***********************************************************************
# ***********************************************************************
if __name__ == "__main__":
a = GetAllProcesses()
print a
process = 'alg'# process name
Kill_Process ( process )
os.startfile("C:\Program Files (x86)\Microsoft Office\Office15\OUTLOOK.EXE")
'''
|
flexible
|
{
"blob_id": "bb6d6061365fad809448d09a1c031b984423b5e0",
"index": 8658,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef check_exsit(process_name):\n WMI = win32com.client.GetObject('winmgmts:')\n processCodeCov = WMI.ExecQuery(\n 'select * from Win32_Process where Name=\"%s\"' % process_name)\n if len(processCodeCov) > 0:\n return bool(True)\n else:\n return bool(False)\n\n\nif __name__ == '__main__':\n process = 'OUTLOOK.EXE'\n if check_exsit(process):\n os.system('taskkill /F /IM OUTLOOK.EXE')\n os.startfile(\n 'C:\\\\Program Files (x86)\\\\Microsoft Office\\\\Office15\\\\OUTLOOK.EXE')\n else:\n os.startfile(\n 'C:\\\\Program Files (x86)\\\\Microsoft Office\\\\Office15\\\\OUTLOOK.EXE')\n<mask token>\n",
"step-3": "__author__ = 'liwenchang'\n<mask token>\n\n\ndef check_exsit(process_name):\n WMI = win32com.client.GetObject('winmgmts:')\n processCodeCov = WMI.ExecQuery(\n 'select * from Win32_Process where Name=\"%s\"' % process_name)\n if len(processCodeCov) > 0:\n return bool(True)\n else:\n return bool(False)\n\n\nif __name__ == '__main__':\n process = 'OUTLOOK.EXE'\n if check_exsit(process):\n os.system('taskkill /F /IM OUTLOOK.EXE')\n os.startfile(\n 'C:\\\\Program Files (x86)\\\\Microsoft Office\\\\Office15\\\\OUTLOOK.EXE')\n else:\n os.startfile(\n 'C:\\\\Program Files (x86)\\\\Microsoft Office\\\\Office15\\\\OUTLOOK.EXE')\n<mask token>\n",
"step-4": "__author__ = 'liwenchang'\nimport os\nimport time\nimport win32api, win32pdhutil, win32con, win32com.client\nimport win32pdh, string\n\n\ndef check_exsit(process_name):\n WMI = win32com.client.GetObject('winmgmts:')\n processCodeCov = WMI.ExecQuery(\n 'select * from Win32_Process where Name=\"%s\"' % process_name)\n if len(processCodeCov) > 0:\n return bool(True)\n else:\n return bool(False)\n\n\nif __name__ == '__main__':\n process = 'OUTLOOK.EXE'\n if check_exsit(process):\n os.system('taskkill /F /IM OUTLOOK.EXE')\n os.startfile(\n 'C:\\\\Program Files (x86)\\\\Microsoft Office\\\\Office15\\\\OUTLOOK.EXE')\n else:\n os.startfile(\n 'C:\\\\Program Files (x86)\\\\Microsoft Office\\\\Office15\\\\OUTLOOK.EXE')\n<mask token>\n",
"step-5": "__author__ = 'liwenchang'\n #-*- coding:utf-8 -*-\nimport os\nimport time\nimport win32api, win32pdhutil, win32con, win32com.client\nimport win32pdh, string\n\n\ndef check_exsit(process_name):\n WMI = win32com.client.GetObject('winmgmts:')\n processCodeCov = WMI.ExecQuery('select * from Win32_Process where Name=\"%s\"' % process_name)\n if len(processCodeCov) > 0:\n #print '%s is exists' % process_name\n return bool(True)\n else:\n #print '%s is not exists' % process_name\n return bool(False)\n\n\n\nif __name__ == '__main__':\n process='OUTLOOK.EXE'\n\n if check_exsit(process):\n os.system('taskkill /F /IM OUTLOOK.EXE')\n os.startfile(\"C:\\Program Files (x86)\\Microsoft Office\\Office15\\OUTLOOK.EXE\")\n else:\n os.startfile(\"C:\\Program Files (x86)\\Microsoft Office\\Office15\\OUTLOOK.EXE\")\n\n\n\n\n\n#os.system('taskkill /F /IM OUTLOOK.EXE')\n#os.startfile(\"C:\\Program Files (x86)\\Microsoft Office\\Office15\\OUTLOOK.EXE\")\n\n'''\n# ***********************************************************************\n# ***********************************************************************\ndef GetProcessID( name ):\n object = \"Process\"\n items, instances = win32pdh.EnumObjectItems(None,None,object, win32pdh.PERF_DETAIL_WIZARD)\n val = None\n if name in instances :\n hq = win32pdh.OpenQuery()\n hcs = []\n item = \"ID Process\"\n path = win32pdh.MakeCounterPath( (None,object,name, None, 0, item) )\n hcs.append(win32pdh.AddCounter(hq, path))\n win32pdh.CollectQueryData(hq)\n time.sleep(0.01)\n win32pdh.CollectQueryData(hq)\n for hc in hcs:\n type, val = win32pdh.GetFormattedCounterValue(hc, win32pdh.PDH_FMT_LONG)\n win32pdh.RemoveCounter(hc)\n win32pdh.CloseQuery(hq)\n return val\n# ***********************************************************************\n\n\n# ***********************************************************************\n# ***********************************************************************\ndef Kill_Process ( name ) :\n pid = GetProcessID (name)\n print pid\n if pid:\n print \"exist\"\n Kill_Process_pid(pid)\n else:\n print \"not this proccess\"\n# ***********************************************************************\n'''\n\n\n'''\n#THIS IS SLOW !!\ndef Kill_Process ( process ) :\n #get process id's for the given process name\n pids = win32pdhutil.FindPerformanceAttributesByName ( process )\n for p in pids:\n handle = win32api.OpenProcess(win32con.PROCESS_TERMINATE, 0, p) #get process handle\n win32api.TerminateProcess(handle,0) #kill by handle\n win32api.CloseHandle(handle) #close api\n\n'''\n\n# ***********************************************************************\n# ***********************************************************************\n\n\n'''\ndef Kill_Process ( process_name ) :\n #get process id's for the given process name\n pids = win32pdhutil.FindPerformanceAttributesByName ( 'OUTLOOK.EXE' )\n print pids\n for p in pids:\n handle = win32api.OpenProcess(win32con.PROCESS_TERMINATE, 0, p) #get process handle\n# win32api.TerminateProcess(handle,0) #kill by handle\n# win32api.CloseHandle(handle) #close api\n'''\n\n\n'''\nimport os\ncommand = 'taskkill /F /IM QQ.exe'\nos.system(command)\n'''\n\n\n\n\n'''\n\n\n\n# ***********************************************************************\n# ***********************************************************************\nif __name__ == \"__main__\":\n a = GetAllProcesses()\n print a\n\n process = 'alg'# process name\n Kill_Process ( process )\n\n\n\n\n\n\nos.startfile(\"C:\\Program Files (x86)\\Microsoft 
Office\\Office15\\OUTLOOK.EXE\")\n'''\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
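A cross-platform variant of the check-and-restart logic above, sketched with the third-party psutil package instead of WMI; the executable path is hypothetical and Windows-specific.

import subprocess

import psutil  # pip install psutil


def is_running(name):
    # process_iter(['name']) prefetches the name attribute for each process
    return any(p.info['name'] == name for p in psutil.process_iter(['name']))


def restart(name, path):
    for p in psutil.process_iter(['name']):
        if p.info['name'] == name:
            p.kill()
    subprocess.Popen([path])


restart('OUTLOOK.EXE', r'C:\Program Files (x86)\Microsoft Office\Office15\OUTLOOK.EXE')
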
<|reserved_special_token_0|>
class LGBModel(ModelFT, LightGBMFInt):
<|reserved_special_token_0|>
def __init__(self, loss='mse', early_stopping_rounds=50,
num_boost_round=1000, **kwargs):
if loss not in {'mse', 'binary'}:
raise NotImplementedError
self.params = {'objective': loss, 'verbosity': -1}
self.params.update(kwargs)
self.early_stopping_rounds = early_stopping_rounds
self.num_boost_round = num_boost_round
self.model = None
def _prepare_data(self, dataset: DatasetH, reweighter=None) ->List[Tuple
[lgb.Dataset, str]]:
"""
The motivation of current version is to make validation optional
- train segment is necessary;
"""
ds_l = []
assert 'train' in dataset.segments
for key in ['train', 'valid']:
if key in dataset.segments:
df = dataset.prepare(key, col_set=['feature', 'label'],
data_key=DataHandlerLP.DK_L)
if df.empty:
raise ValueError(
'Empty data from dataset, please check your dataset config.'
)
x, y = df['feature'], df['label']
if y.values.ndim == 2 and y.values.shape[1] == 1:
y = np.squeeze(y.values)
else:
raise ValueError(
"LightGBM doesn't support multi-label training")
if reweighter is None:
w = None
elif isinstance(reweighter, Reweighter):
w = reweighter.reweight(df)
else:
raise ValueError('Unsupported reweighter type.')
ds_l.append((lgb.Dataset(x.values, label=y, weight=w), key))
return ds_l
def fit(self, dataset: DatasetH, num_boost_round=None,
early_stopping_rounds=None, verbose_eval=20, evals_result=None,
reweighter=None, **kwargs):
if evals_result is None:
evals_result = {}
ds_l = self._prepare_data(dataset, reweighter)
ds, names = list(zip(*ds_l))
early_stopping_callback = lgb.early_stopping(self.
early_stopping_rounds if early_stopping_rounds is None else
early_stopping_rounds)
verbose_eval_callback = lgb.log_evaluation(period=verbose_eval)
evals_result_callback = lgb.record_evaluation(evals_result)
self.model = lgb.train(self.params, ds[0], num_boost_round=self.
num_boost_round if num_boost_round is None else num_boost_round,
valid_sets=ds, valid_names=names, callbacks=[
early_stopping_callback, verbose_eval_callback,
evals_result_callback], **kwargs)
for k in names:
for key, val in evals_result[k].items():
name = f'{key}.{k}'
for epoch, m in enumerate(val):
R.log_metrics(**{name.replace('@', '_'): m}, step=epoch)
<|reserved_special_token_0|>
def finetune(self, dataset: DatasetH, num_boost_round=10, verbose_eval=
20, reweighter=None):
"""
finetune model
Parameters
----------
dataset : DatasetH
dataset for finetuning
num_boost_round : int
number of round to finetune model
verbose_eval : int
verbose level
"""
dtrain, _ = self._prepare_data(dataset, reweighter)
if dtrain.empty:
raise ValueError(
'Empty data from dataset, please check your dataset config.')
verbose_eval_callback = lgb.log_evaluation(period=verbose_eval)
self.model = lgb.train(self.params, dtrain, num_boost_round=
num_boost_round, init_model=self.model, valid_sets=[dtrain],
valid_names=['train'], callbacks=[verbose_eval_callback])
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class LGBModel(ModelFT, LightGBMFInt):
<|reserved_special_token_0|>
def __init__(self, loss='mse', early_stopping_rounds=50,
num_boost_round=1000, **kwargs):
if loss not in {'mse', 'binary'}:
raise NotImplementedError
self.params = {'objective': loss, 'verbosity': -1}
self.params.update(kwargs)
self.early_stopping_rounds = early_stopping_rounds
self.num_boost_round = num_boost_round
self.model = None
def _prepare_data(self, dataset: DatasetH, reweighter=None) ->List[Tuple
[lgb.Dataset, str]]:
"""
The motivation of current version is to make validation optional
- train segment is necessary;
"""
ds_l = []
assert 'train' in dataset.segments
for key in ['train', 'valid']:
if key in dataset.segments:
df = dataset.prepare(key, col_set=['feature', 'label'],
data_key=DataHandlerLP.DK_L)
if df.empty:
raise ValueError(
'Empty data from dataset, please check your dataset config.'
)
x, y = df['feature'], df['label']
if y.values.ndim == 2 and y.values.shape[1] == 1:
y = np.squeeze(y.values)
else:
raise ValueError(
"LightGBM doesn't support multi-label training")
if reweighter is None:
w = None
elif isinstance(reweighter, Reweighter):
w = reweighter.reweight(df)
else:
raise ValueError('Unsupported reweighter type.')
ds_l.append((lgb.Dataset(x.values, label=y, weight=w), key))
return ds_l
def fit(self, dataset: DatasetH, num_boost_round=None,
early_stopping_rounds=None, verbose_eval=20, evals_result=None,
reweighter=None, **kwargs):
if evals_result is None:
evals_result = {}
ds_l = self._prepare_data(dataset, reweighter)
ds, names = list(zip(*ds_l))
early_stopping_callback = lgb.early_stopping(self.
early_stopping_rounds if early_stopping_rounds is None else
early_stopping_rounds)
verbose_eval_callback = lgb.log_evaluation(period=verbose_eval)
evals_result_callback = lgb.record_evaluation(evals_result)
self.model = lgb.train(self.params, ds[0], num_boost_round=self.
num_boost_round if num_boost_round is None else num_boost_round,
valid_sets=ds, valid_names=names, callbacks=[
early_stopping_callback, verbose_eval_callback,
evals_result_callback], **kwargs)
for k in names:
for key, val in evals_result[k].items():
name = f'{key}.{k}'
for epoch, m in enumerate(val):
R.log_metrics(**{name.replace('@', '_'): m}, step=epoch)
def predict(self, dataset: DatasetH, segment: Union[Text, slice]='test'):
if self.model is None:
raise ValueError('model is not fitted yet!')
x_test = dataset.prepare(segment, col_set='feature', data_key=
DataHandlerLP.DK_I)
return pd.Series(self.model.predict(x_test.values), index=x_test.index)
def finetune(self, dataset: DatasetH, num_boost_round=10, verbose_eval=
20, reweighter=None):
"""
finetune model
Parameters
----------
dataset : DatasetH
dataset for finetuning
num_boost_round : int
number of round to finetune model
verbose_eval : int
verbose level
"""
dtrain, _ = self._prepare_data(dataset, reweighter)
if dtrain.empty:
raise ValueError(
'Empty data from dataset, please check your dataset config.')
verbose_eval_callback = lgb.log_evaluation(period=verbose_eval)
self.model = lgb.train(self.params, dtrain, num_boost_round=
num_boost_round, init_model=self.model, valid_sets=[dtrain],
valid_names=['train'], callbacks=[verbose_eval_callback])
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class LGBModel(ModelFT, LightGBMFInt):
"""LightGBM Model"""
def __init__(self, loss='mse', early_stopping_rounds=50,
num_boost_round=1000, **kwargs):
if loss not in {'mse', 'binary'}:
raise NotImplementedError
self.params = {'objective': loss, 'verbosity': -1}
self.params.update(kwargs)
self.early_stopping_rounds = early_stopping_rounds
self.num_boost_round = num_boost_round
self.model = None
def _prepare_data(self, dataset: DatasetH, reweighter=None) ->List[Tuple
[lgb.Dataset, str]]:
"""
The motivation of current version is to make validation optional
- train segment is necessary;
"""
ds_l = []
assert 'train' in dataset.segments
for key in ['train', 'valid']:
if key in dataset.segments:
df = dataset.prepare(key, col_set=['feature', 'label'],
data_key=DataHandlerLP.DK_L)
if df.empty:
raise ValueError(
'Empty data from dataset, please check your dataset config.'
)
x, y = df['feature'], df['label']
if y.values.ndim == 2 and y.values.shape[1] == 1:
y = np.squeeze(y.values)
else:
raise ValueError(
"LightGBM doesn't support multi-label training")
if reweighter is None:
w = None
elif isinstance(reweighter, Reweighter):
w = reweighter.reweight(df)
else:
raise ValueError('Unsupported reweighter type.')
ds_l.append((lgb.Dataset(x.values, label=y, weight=w), key))
return ds_l
def fit(self, dataset: DatasetH, num_boost_round=None,
early_stopping_rounds=None, verbose_eval=20, evals_result=None,
reweighter=None, **kwargs):
if evals_result is None:
evals_result = {}
ds_l = self._prepare_data(dataset, reweighter)
ds, names = list(zip(*ds_l))
early_stopping_callback = lgb.early_stopping(self.
early_stopping_rounds if early_stopping_rounds is None else
early_stopping_rounds)
verbose_eval_callback = lgb.log_evaluation(period=verbose_eval)
evals_result_callback = lgb.record_evaluation(evals_result)
self.model = lgb.train(self.params, ds[0], num_boost_round=self.
num_boost_round if num_boost_round is None else num_boost_round,
valid_sets=ds, valid_names=names, callbacks=[
early_stopping_callback, verbose_eval_callback,
evals_result_callback], **kwargs)
for k in names:
for key, val in evals_result[k].items():
name = f'{key}.{k}'
for epoch, m in enumerate(val):
R.log_metrics(**{name.replace('@', '_'): m}, step=epoch)
def predict(self, dataset: DatasetH, segment: Union[Text, slice]='test'):
if self.model is None:
raise ValueError('model is not fitted yet!')
x_test = dataset.prepare(segment, col_set='feature', data_key=
DataHandlerLP.DK_I)
return pd.Series(self.model.predict(x_test.values), index=x_test.index)
def finetune(self, dataset: DatasetH, num_boost_round=10, verbose_eval=
20, reweighter=None):
"""
finetune model
Parameters
----------
dataset : DatasetH
dataset for finetuning
num_boost_round : int
number of round to finetune model
verbose_eval : int
verbose level
"""
dtrain, _ = self._prepare_data(dataset, reweighter)
if dtrain.empty:
raise ValueError(
'Empty data from dataset, please check your dataset config.')
verbose_eval_callback = lgb.log_evaluation(period=verbose_eval)
self.model = lgb.train(self.params, dtrain, num_boost_round=
num_boost_round, init_model=self.model, valid_sets=[dtrain],
valid_names=['train'], callbacks=[verbose_eval_callback])
<|reserved_special_token_1|>
import numpy as np
import pandas as pd
import lightgbm as lgb
from typing import List, Text, Tuple, Union
from ...model.base import ModelFT
from ...data.dataset import DatasetH
from ...data.dataset.handler import DataHandlerLP
from ...model.interpret.base import LightGBMFInt
from ...data.dataset.weight import Reweighter
from qlib.workflow import R
class LGBModel(ModelFT, LightGBMFInt):
"""LightGBM Model"""
def __init__(self, loss='mse', early_stopping_rounds=50,
num_boost_round=1000, **kwargs):
if loss not in {'mse', 'binary'}:
raise NotImplementedError
self.params = {'objective': loss, 'verbosity': -1}
self.params.update(kwargs)
self.early_stopping_rounds = early_stopping_rounds
self.num_boost_round = num_boost_round
self.model = None
def _prepare_data(self, dataset: DatasetH, reweighter=None) ->List[Tuple
[lgb.Dataset, str]]:
"""
The motivation of current version is to make validation optional
- train segment is necessary;
"""
ds_l = []
assert 'train' in dataset.segments
for key in ['train', 'valid']:
if key in dataset.segments:
df = dataset.prepare(key, col_set=['feature', 'label'],
data_key=DataHandlerLP.DK_L)
if df.empty:
raise ValueError(
'Empty data from dataset, please check your dataset config.'
)
x, y = df['feature'], df['label']
if y.values.ndim == 2 and y.values.shape[1] == 1:
y = np.squeeze(y.values)
else:
raise ValueError(
"LightGBM doesn't support multi-label training")
if reweighter is None:
w = None
elif isinstance(reweighter, Reweighter):
w = reweighter.reweight(df)
else:
raise ValueError('Unsupported reweighter type.')
ds_l.append((lgb.Dataset(x.values, label=y, weight=w), key))
return ds_l
def fit(self, dataset: DatasetH, num_boost_round=None,
early_stopping_rounds=None, verbose_eval=20, evals_result=None,
reweighter=None, **kwargs):
if evals_result is None:
evals_result = {}
ds_l = self._prepare_data(dataset, reweighter)
ds, names = list(zip(*ds_l))
early_stopping_callback = lgb.early_stopping(self.
early_stopping_rounds if early_stopping_rounds is None else
early_stopping_rounds)
verbose_eval_callback = lgb.log_evaluation(period=verbose_eval)
evals_result_callback = lgb.record_evaluation(evals_result)
self.model = lgb.train(self.params, ds[0], num_boost_round=self.
num_boost_round if num_boost_round is None else num_boost_round,
valid_sets=ds, valid_names=names, callbacks=[
early_stopping_callback, verbose_eval_callback,
evals_result_callback], **kwargs)
for k in names:
for key, val in evals_result[k].items():
name = f'{key}.{k}'
for epoch, m in enumerate(val):
R.log_metrics(**{name.replace('@', '_'): m}, step=epoch)
def predict(self, dataset: DatasetH, segment: Union[Text, slice]='test'):
if self.model is None:
raise ValueError('model is not fitted yet!')
x_test = dataset.prepare(segment, col_set='feature', data_key=
DataHandlerLP.DK_I)
return pd.Series(self.model.predict(x_test.values), index=x_test.index)
def finetune(self, dataset: DatasetH, num_boost_round=10, verbose_eval=
20, reweighter=None):
"""
finetune model
Parameters
----------
dataset : DatasetH
dataset for finetuning
num_boost_round : int
number of round to finetune model
verbose_eval : int
verbose level
"""
dtrain, _ = self._prepare_data(dataset, reweighter)
if dtrain.empty:
raise ValueError(
'Empty data from dataset, please check your dataset config.')
verbose_eval_callback = lgb.log_evaluation(period=verbose_eval)
self.model = lgb.train(self.params, dtrain, num_boost_round=
num_boost_round, init_model=self.model, valid_sets=[dtrain],
valid_names=['train'], callbacks=[verbose_eval_callback])
<|reserved_special_token_1|>
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
import numpy as np
import pandas as pd
import lightgbm as lgb
from typing import List, Text, Tuple, Union
from ...model.base import ModelFT
from ...data.dataset import DatasetH
from ...data.dataset.handler import DataHandlerLP
from ...model.interpret.base import LightGBMFInt
from ...data.dataset.weight import Reweighter
from qlib.workflow import R
class LGBModel(ModelFT, LightGBMFInt):
"""LightGBM Model"""
def __init__(self, loss="mse", early_stopping_rounds=50, num_boost_round=1000, **kwargs):
if loss not in {"mse", "binary"}:
raise NotImplementedError
self.params = {"objective": loss, "verbosity": -1}
self.params.update(kwargs)
self.early_stopping_rounds = early_stopping_rounds
self.num_boost_round = num_boost_round
self.model = None
def _prepare_data(self, dataset: DatasetH, reweighter=None) -> List[Tuple[lgb.Dataset, str]]:
"""
The motivation of current version is to make validation optional
- train segment is necessary;
"""
ds_l = []
assert "train" in dataset.segments
for key in ["train", "valid"]:
if key in dataset.segments:
df = dataset.prepare(key, col_set=["feature", "label"], data_key=DataHandlerLP.DK_L)
if df.empty:
raise ValueError("Empty data from dataset, please check your dataset config.")
x, y = df["feature"], df["label"]
# Lightgbm need 1D array as its label
if y.values.ndim == 2 and y.values.shape[1] == 1:
y = np.squeeze(y.values)
else:
raise ValueError("LightGBM doesn't support multi-label training")
if reweighter is None:
w = None
elif isinstance(reweighter, Reweighter):
w = reweighter.reweight(df)
else:
raise ValueError("Unsupported reweighter type.")
ds_l.append((lgb.Dataset(x.values, label=y, weight=w), key))
return ds_l
def fit(
self,
dataset: DatasetH,
num_boost_round=None,
early_stopping_rounds=None,
verbose_eval=20,
evals_result=None,
reweighter=None,
**kwargs,
):
if evals_result is None:
evals_result = {} # in case of unsafety of Python default values
ds_l = self._prepare_data(dataset, reweighter)
ds, names = list(zip(*ds_l))
early_stopping_callback = lgb.early_stopping(
self.early_stopping_rounds if early_stopping_rounds is None else early_stopping_rounds
)
        # NOTE: if you encounter an error here, please upgrade your lightgbm
verbose_eval_callback = lgb.log_evaluation(period=verbose_eval)
evals_result_callback = lgb.record_evaluation(evals_result)
self.model = lgb.train(
self.params,
ds[0], # training dataset
num_boost_round=self.num_boost_round if num_boost_round is None else num_boost_round,
valid_sets=ds,
valid_names=names,
callbacks=[early_stopping_callback, verbose_eval_callback, evals_result_callback],
**kwargs,
)
for k in names:
for key, val in evals_result[k].items():
name = f"{key}.{k}"
for epoch, m in enumerate(val):
R.log_metrics(**{name.replace("@", "_"): m}, step=epoch)
def predict(self, dataset: DatasetH, segment: Union[Text, slice] = "test"):
if self.model is None:
raise ValueError("model is not fitted yet!")
x_test = dataset.prepare(segment, col_set="feature", data_key=DataHandlerLP.DK_I)
return pd.Series(self.model.predict(x_test.values), index=x_test.index)
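    # --- editor's note (not part of the original source) --------------------
    # Note the data keys: training prepares data with DataHandlerLP.DK_L (the
    # "learn" view, labels included), while predict uses DK_I (the "infer"
    # view, features only), matching qlib's split between label-aware and
    # inference-time processing.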
def finetune(self, dataset: DatasetH, num_boost_round=10, verbose_eval=20, reweighter=None):
"""
finetune model
Parameters
----------
dataset : DatasetH
dataset for finetuning
num_boost_round : int
            number of rounds to finetune the model
verbose_eval : int
verbose level
"""
        # Based on the existing model, finetune by training for more rounds
        # _prepare_data raises on empty data and returns the "train" dataset first
        dtrain, _ = self._prepare_data(dataset, reweighter)[0]
verbose_eval_callback = lgb.log_evaluation(period=verbose_eval)
self.model = lgb.train(
self.params,
dtrain,
num_boost_round=num_boost_round,
init_model=self.model,
valid_sets=[dtrain],
valid_names=["train"],
callbacks=[verbose_eval_callback],
)
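# --- editor's sketch (not part of the original source) ---------------------
# A self-contained illustration of the callback pattern fit() relies on:
# early_stopping / log_evaluation / record_evaluation replace the deprecated
# keyword arguments of older LightGBM releases. Everything below is synthetic
# demo data, not a qlib object.
import numpy as np
import lightgbm as lgb

rng = np.random.default_rng(0)
X = rng.normal(size=(500, 10))
y = X @ rng.normal(size=10) + rng.normal(scale=0.1, size=500)

dtrain = lgb.Dataset(X[:400], label=y[:400])
dvalid = lgb.Dataset(X[400:], label=y[400:], reference=dtrain)

evals_result = {}
booster = lgb.train(
    {"objective": "mse", "verbosity": -1},
    dtrain,
    num_boost_round=200,
    valid_sets=[dtrain, dvalid],
    valid_names=["train", "valid"],
    callbacks=[
        lgb.early_stopping(20),               # stop when "valid" stops improving
        lgb.log_evaluation(period=50),        # print metrics every 50 rounds
        lgb.record_evaluation(evals_result),  # filled in place, per round
    ],
)
# evals_result["valid"]["l2"] now holds the per-round validation loss, the
# same structure LGBModel.fit walks to log metrics to qlib's recorder R.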
|
flexible
|
{
"blob_id": "d37187f067ddff94015e639a1759dddced817945",
"index": 6205,
"step-1": "<mask token>\n\n\nclass LGBModel(ModelFT, LightGBMFInt):\n <mask token>\n\n def __init__(self, loss='mse', early_stopping_rounds=50,\n num_boost_round=1000, **kwargs):\n if loss not in {'mse', 'binary'}:\n raise NotImplementedError\n self.params = {'objective': loss, 'verbosity': -1}\n self.params.update(kwargs)\n self.early_stopping_rounds = early_stopping_rounds\n self.num_boost_round = num_boost_round\n self.model = None\n\n def _prepare_data(self, dataset: DatasetH, reweighter=None) ->List[Tuple\n [lgb.Dataset, str]]:\n \"\"\"\n The motivation of current version is to make validation optional\n - train segment is necessary;\n \"\"\"\n ds_l = []\n assert 'train' in dataset.segments\n for key in ['train', 'valid']:\n if key in dataset.segments:\n df = dataset.prepare(key, col_set=['feature', 'label'],\n data_key=DataHandlerLP.DK_L)\n if df.empty:\n raise ValueError(\n 'Empty data from dataset, please check your dataset config.'\n )\n x, y = df['feature'], df['label']\n if y.values.ndim == 2 and y.values.shape[1] == 1:\n y = np.squeeze(y.values)\n else:\n raise ValueError(\n \"LightGBM doesn't support multi-label training\")\n if reweighter is None:\n w = None\n elif isinstance(reweighter, Reweighter):\n w = reweighter.reweight(df)\n else:\n raise ValueError('Unsupported reweighter type.')\n ds_l.append((lgb.Dataset(x.values, label=y, weight=w), key))\n return ds_l\n\n def fit(self, dataset: DatasetH, num_boost_round=None,\n early_stopping_rounds=None, verbose_eval=20, evals_result=None,\n reweighter=None, **kwargs):\n if evals_result is None:\n evals_result = {}\n ds_l = self._prepare_data(dataset, reweighter)\n ds, names = list(zip(*ds_l))\n early_stopping_callback = lgb.early_stopping(self.\n early_stopping_rounds if early_stopping_rounds is None else\n early_stopping_rounds)\n verbose_eval_callback = lgb.log_evaluation(period=verbose_eval)\n evals_result_callback = lgb.record_evaluation(evals_result)\n self.model = lgb.train(self.params, ds[0], num_boost_round=self.\n num_boost_round if num_boost_round is None else num_boost_round,\n valid_sets=ds, valid_names=names, callbacks=[\n early_stopping_callback, verbose_eval_callback,\n evals_result_callback], **kwargs)\n for k in names:\n for key, val in evals_result[k].items():\n name = f'{key}.{k}'\n for epoch, m in enumerate(val):\n R.log_metrics(**{name.replace('@', '_'): m}, step=epoch)\n <mask token>\n\n def finetune(self, dataset: DatasetH, num_boost_round=10, verbose_eval=\n 20, reweighter=None):\n \"\"\"\n finetune model\n\n Parameters\n ----------\n dataset : DatasetH\n dataset for finetuning\n num_boost_round : int\n number of round to finetune model\n verbose_eval : int\n verbose level\n \"\"\"\n dtrain, _ = self._prepare_data(dataset, reweighter)\n if dtrain.empty:\n raise ValueError(\n 'Empty data from dataset, please check your dataset config.')\n verbose_eval_callback = lgb.log_evaluation(period=verbose_eval)\n self.model = lgb.train(self.params, dtrain, num_boost_round=\n num_boost_round, init_model=self.model, valid_sets=[dtrain],\n valid_names=['train'], callbacks=[verbose_eval_callback])\n",
"step-2": "<mask token>\n\n\nclass LGBModel(ModelFT, LightGBMFInt):\n <mask token>\n\n def __init__(self, loss='mse', early_stopping_rounds=50,\n num_boost_round=1000, **kwargs):\n if loss not in {'mse', 'binary'}:\n raise NotImplementedError\n self.params = {'objective': loss, 'verbosity': -1}\n self.params.update(kwargs)\n self.early_stopping_rounds = early_stopping_rounds\n self.num_boost_round = num_boost_round\n self.model = None\n\n def _prepare_data(self, dataset: DatasetH, reweighter=None) ->List[Tuple\n [lgb.Dataset, str]]:\n \"\"\"\n The motivation of current version is to make validation optional\n - train segment is necessary;\n \"\"\"\n ds_l = []\n assert 'train' in dataset.segments\n for key in ['train', 'valid']:\n if key in dataset.segments:\n df = dataset.prepare(key, col_set=['feature', 'label'],\n data_key=DataHandlerLP.DK_L)\n if df.empty:\n raise ValueError(\n 'Empty data from dataset, please check your dataset config.'\n )\n x, y = df['feature'], df['label']\n if y.values.ndim == 2 and y.values.shape[1] == 1:\n y = np.squeeze(y.values)\n else:\n raise ValueError(\n \"LightGBM doesn't support multi-label training\")\n if reweighter is None:\n w = None\n elif isinstance(reweighter, Reweighter):\n w = reweighter.reweight(df)\n else:\n raise ValueError('Unsupported reweighter type.')\n ds_l.append((lgb.Dataset(x.values, label=y, weight=w), key))\n return ds_l\n\n def fit(self, dataset: DatasetH, num_boost_round=None,\n early_stopping_rounds=None, verbose_eval=20, evals_result=None,\n reweighter=None, **kwargs):\n if evals_result is None:\n evals_result = {}\n ds_l = self._prepare_data(dataset, reweighter)\n ds, names = list(zip(*ds_l))\n early_stopping_callback = lgb.early_stopping(self.\n early_stopping_rounds if early_stopping_rounds is None else\n early_stopping_rounds)\n verbose_eval_callback = lgb.log_evaluation(period=verbose_eval)\n evals_result_callback = lgb.record_evaluation(evals_result)\n self.model = lgb.train(self.params, ds[0], num_boost_round=self.\n num_boost_round if num_boost_round is None else num_boost_round,\n valid_sets=ds, valid_names=names, callbacks=[\n early_stopping_callback, verbose_eval_callback,\n evals_result_callback], **kwargs)\n for k in names:\n for key, val in evals_result[k].items():\n name = f'{key}.{k}'\n for epoch, m in enumerate(val):\n R.log_metrics(**{name.replace('@', '_'): m}, step=epoch)\n\n def predict(self, dataset: DatasetH, segment: Union[Text, slice]='test'):\n if self.model is None:\n raise ValueError('model is not fitted yet!')\n x_test = dataset.prepare(segment, col_set='feature', data_key=\n DataHandlerLP.DK_I)\n return pd.Series(self.model.predict(x_test.values), index=x_test.index)\n\n def finetune(self, dataset: DatasetH, num_boost_round=10, verbose_eval=\n 20, reweighter=None):\n \"\"\"\n finetune model\n\n Parameters\n ----------\n dataset : DatasetH\n dataset for finetuning\n num_boost_round : int\n number of round to finetune model\n verbose_eval : int\n verbose level\n \"\"\"\n dtrain, _ = self._prepare_data(dataset, reweighter)\n if dtrain.empty:\n raise ValueError(\n 'Empty data from dataset, please check your dataset config.')\n verbose_eval_callback = lgb.log_evaluation(period=verbose_eval)\n self.model = lgb.train(self.params, dtrain, num_boost_round=\n num_boost_round, init_model=self.model, valid_sets=[dtrain],\n valid_names=['train'], callbacks=[verbose_eval_callback])\n",
"step-3": "<mask token>\n\n\nclass LGBModel(ModelFT, LightGBMFInt):\n \"\"\"LightGBM Model\"\"\"\n\n def __init__(self, loss='mse', early_stopping_rounds=50,\n num_boost_round=1000, **kwargs):\n if loss not in {'mse', 'binary'}:\n raise NotImplementedError\n self.params = {'objective': loss, 'verbosity': -1}\n self.params.update(kwargs)\n self.early_stopping_rounds = early_stopping_rounds\n self.num_boost_round = num_boost_round\n self.model = None\n\n def _prepare_data(self, dataset: DatasetH, reweighter=None) ->List[Tuple\n [lgb.Dataset, str]]:\n \"\"\"\n The motivation of current version is to make validation optional\n - train segment is necessary;\n \"\"\"\n ds_l = []\n assert 'train' in dataset.segments\n for key in ['train', 'valid']:\n if key in dataset.segments:\n df = dataset.prepare(key, col_set=['feature', 'label'],\n data_key=DataHandlerLP.DK_L)\n if df.empty:\n raise ValueError(\n 'Empty data from dataset, please check your dataset config.'\n )\n x, y = df['feature'], df['label']\n if y.values.ndim == 2 and y.values.shape[1] == 1:\n y = np.squeeze(y.values)\n else:\n raise ValueError(\n \"LightGBM doesn't support multi-label training\")\n if reweighter is None:\n w = None\n elif isinstance(reweighter, Reweighter):\n w = reweighter.reweight(df)\n else:\n raise ValueError('Unsupported reweighter type.')\n ds_l.append((lgb.Dataset(x.values, label=y, weight=w), key))\n return ds_l\n\n def fit(self, dataset: DatasetH, num_boost_round=None,\n early_stopping_rounds=None, verbose_eval=20, evals_result=None,\n reweighter=None, **kwargs):\n if evals_result is None:\n evals_result = {}\n ds_l = self._prepare_data(dataset, reweighter)\n ds, names = list(zip(*ds_l))\n early_stopping_callback = lgb.early_stopping(self.\n early_stopping_rounds if early_stopping_rounds is None else\n early_stopping_rounds)\n verbose_eval_callback = lgb.log_evaluation(period=verbose_eval)\n evals_result_callback = lgb.record_evaluation(evals_result)\n self.model = lgb.train(self.params, ds[0], num_boost_round=self.\n num_boost_round if num_boost_round is None else num_boost_round,\n valid_sets=ds, valid_names=names, callbacks=[\n early_stopping_callback, verbose_eval_callback,\n evals_result_callback], **kwargs)\n for k in names:\n for key, val in evals_result[k].items():\n name = f'{key}.{k}'\n for epoch, m in enumerate(val):\n R.log_metrics(**{name.replace('@', '_'): m}, step=epoch)\n\n def predict(self, dataset: DatasetH, segment: Union[Text, slice]='test'):\n if self.model is None:\n raise ValueError('model is not fitted yet!')\n x_test = dataset.prepare(segment, col_set='feature', data_key=\n DataHandlerLP.DK_I)\n return pd.Series(self.model.predict(x_test.values), index=x_test.index)\n\n def finetune(self, dataset: DatasetH, num_boost_round=10, verbose_eval=\n 20, reweighter=None):\n \"\"\"\n finetune model\n\n Parameters\n ----------\n dataset : DatasetH\n dataset for finetuning\n num_boost_round : int\n number of round to finetune model\n verbose_eval : int\n verbose level\n \"\"\"\n dtrain, _ = self._prepare_data(dataset, reweighter)\n if dtrain.empty:\n raise ValueError(\n 'Empty data from dataset, please check your dataset config.')\n verbose_eval_callback = lgb.log_evaluation(period=verbose_eval)\n self.model = lgb.train(self.params, dtrain, num_boost_round=\n num_boost_round, init_model=self.model, valid_sets=[dtrain],\n valid_names=['train'], callbacks=[verbose_eval_callback])\n",
"step-4": "import numpy as np\nimport pandas as pd\nimport lightgbm as lgb\nfrom typing import List, Text, Tuple, Union\nfrom ...model.base import ModelFT\nfrom ...data.dataset import DatasetH\nfrom ...data.dataset.handler import DataHandlerLP\nfrom ...model.interpret.base import LightGBMFInt\nfrom ...data.dataset.weight import Reweighter\nfrom qlib.workflow import R\n\n\nclass LGBModel(ModelFT, LightGBMFInt):\n \"\"\"LightGBM Model\"\"\"\n\n def __init__(self, loss='mse', early_stopping_rounds=50,\n num_boost_round=1000, **kwargs):\n if loss not in {'mse', 'binary'}:\n raise NotImplementedError\n self.params = {'objective': loss, 'verbosity': -1}\n self.params.update(kwargs)\n self.early_stopping_rounds = early_stopping_rounds\n self.num_boost_round = num_boost_round\n self.model = None\n\n def _prepare_data(self, dataset: DatasetH, reweighter=None) ->List[Tuple\n [lgb.Dataset, str]]:\n \"\"\"\n The motivation of current version is to make validation optional\n - train segment is necessary;\n \"\"\"\n ds_l = []\n assert 'train' in dataset.segments\n for key in ['train', 'valid']:\n if key in dataset.segments:\n df = dataset.prepare(key, col_set=['feature', 'label'],\n data_key=DataHandlerLP.DK_L)\n if df.empty:\n raise ValueError(\n 'Empty data from dataset, please check your dataset config.'\n )\n x, y = df['feature'], df['label']\n if y.values.ndim == 2 and y.values.shape[1] == 1:\n y = np.squeeze(y.values)\n else:\n raise ValueError(\n \"LightGBM doesn't support multi-label training\")\n if reweighter is None:\n w = None\n elif isinstance(reweighter, Reweighter):\n w = reweighter.reweight(df)\n else:\n raise ValueError('Unsupported reweighter type.')\n ds_l.append((lgb.Dataset(x.values, label=y, weight=w), key))\n return ds_l\n\n def fit(self, dataset: DatasetH, num_boost_round=None,\n early_stopping_rounds=None, verbose_eval=20, evals_result=None,\n reweighter=None, **kwargs):\n if evals_result is None:\n evals_result = {}\n ds_l = self._prepare_data(dataset, reweighter)\n ds, names = list(zip(*ds_l))\n early_stopping_callback = lgb.early_stopping(self.\n early_stopping_rounds if early_stopping_rounds is None else\n early_stopping_rounds)\n verbose_eval_callback = lgb.log_evaluation(period=verbose_eval)\n evals_result_callback = lgb.record_evaluation(evals_result)\n self.model = lgb.train(self.params, ds[0], num_boost_round=self.\n num_boost_round if num_boost_round is None else num_boost_round,\n valid_sets=ds, valid_names=names, callbacks=[\n early_stopping_callback, verbose_eval_callback,\n evals_result_callback], **kwargs)\n for k in names:\n for key, val in evals_result[k].items():\n name = f'{key}.{k}'\n for epoch, m in enumerate(val):\n R.log_metrics(**{name.replace('@', '_'): m}, step=epoch)\n\n def predict(self, dataset: DatasetH, segment: Union[Text, slice]='test'):\n if self.model is None:\n raise ValueError('model is not fitted yet!')\n x_test = dataset.prepare(segment, col_set='feature', data_key=\n DataHandlerLP.DK_I)\n return pd.Series(self.model.predict(x_test.values), index=x_test.index)\n\n def finetune(self, dataset: DatasetH, num_boost_round=10, verbose_eval=\n 20, reweighter=None):\n \"\"\"\n finetune model\n\n Parameters\n ----------\n dataset : DatasetH\n dataset for finetuning\n num_boost_round : int\n number of round to finetune model\n verbose_eval : int\n verbose level\n \"\"\"\n dtrain, _ = self._prepare_data(dataset, reweighter)\n if dtrain.empty:\n raise ValueError(\n 'Empty data from dataset, please check your dataset config.')\n verbose_eval_callback = 
lgb.log_evaluation(period=verbose_eval)\n self.model = lgb.train(self.params, dtrain, num_boost_round=\n num_boost_round, init_model=self.model, valid_sets=[dtrain],\n valid_names=['train'], callbacks=[verbose_eval_callback])\n",
"step-5": "# Copyright (c) Microsoft Corporation.\n# Licensed under the MIT License.\n\nimport numpy as np\nimport pandas as pd\nimport lightgbm as lgb\nfrom typing import List, Text, Tuple, Union\nfrom ...model.base import ModelFT\nfrom ...data.dataset import DatasetH\nfrom ...data.dataset.handler import DataHandlerLP\nfrom ...model.interpret.base import LightGBMFInt\nfrom ...data.dataset.weight import Reweighter\nfrom qlib.workflow import R\n\n\nclass LGBModel(ModelFT, LightGBMFInt):\n \"\"\"LightGBM Model\"\"\"\n\n def __init__(self, loss=\"mse\", early_stopping_rounds=50, num_boost_round=1000, **kwargs):\n if loss not in {\"mse\", \"binary\"}:\n raise NotImplementedError\n self.params = {\"objective\": loss, \"verbosity\": -1}\n self.params.update(kwargs)\n self.early_stopping_rounds = early_stopping_rounds\n self.num_boost_round = num_boost_round\n self.model = None\n\n def _prepare_data(self, dataset: DatasetH, reweighter=None) -> List[Tuple[lgb.Dataset, str]]:\n \"\"\"\n The motivation of current version is to make validation optional\n - train segment is necessary;\n \"\"\"\n ds_l = []\n assert \"train\" in dataset.segments\n for key in [\"train\", \"valid\"]:\n if key in dataset.segments:\n df = dataset.prepare(key, col_set=[\"feature\", \"label\"], data_key=DataHandlerLP.DK_L)\n if df.empty:\n raise ValueError(\"Empty data from dataset, please check your dataset config.\")\n x, y = df[\"feature\"], df[\"label\"]\n\n # Lightgbm need 1D array as its label\n if y.values.ndim == 2 and y.values.shape[1] == 1:\n y = np.squeeze(y.values)\n else:\n raise ValueError(\"LightGBM doesn't support multi-label training\")\n\n if reweighter is None:\n w = None\n elif isinstance(reweighter, Reweighter):\n w = reweighter.reweight(df)\n else:\n raise ValueError(\"Unsupported reweighter type.\")\n ds_l.append((lgb.Dataset(x.values, label=y, weight=w), key))\n return ds_l\n\n def fit(\n self,\n dataset: DatasetH,\n num_boost_round=None,\n early_stopping_rounds=None,\n verbose_eval=20,\n evals_result=None,\n reweighter=None,\n **kwargs,\n ):\n if evals_result is None:\n evals_result = {} # in case of unsafety of Python default values\n ds_l = self._prepare_data(dataset, reweighter)\n ds, names = list(zip(*ds_l))\n early_stopping_callback = lgb.early_stopping(\n self.early_stopping_rounds if early_stopping_rounds is None else early_stopping_rounds\n )\n # NOTE: if you encounter error here. 
Please upgrade your lightgbm\n verbose_eval_callback = lgb.log_evaluation(period=verbose_eval)\n evals_result_callback = lgb.record_evaluation(evals_result)\n self.model = lgb.train(\n self.params,\n ds[0], # training dataset\n num_boost_round=self.num_boost_round if num_boost_round is None else num_boost_round,\n valid_sets=ds,\n valid_names=names,\n callbacks=[early_stopping_callback, verbose_eval_callback, evals_result_callback],\n **kwargs,\n )\n for k in names:\n for key, val in evals_result[k].items():\n name = f\"{key}.{k}\"\n for epoch, m in enumerate(val):\n R.log_metrics(**{name.replace(\"@\", \"_\"): m}, step=epoch)\n\n def predict(self, dataset: DatasetH, segment: Union[Text, slice] = \"test\"):\n if self.model is None:\n raise ValueError(\"model is not fitted yet!\")\n x_test = dataset.prepare(segment, col_set=\"feature\", data_key=DataHandlerLP.DK_I)\n return pd.Series(self.model.predict(x_test.values), index=x_test.index)\n\n def finetune(self, dataset: DatasetH, num_boost_round=10, verbose_eval=20, reweighter=None):\n \"\"\"\n finetune model\n\n Parameters\n ----------\n dataset : DatasetH\n dataset for finetuning\n num_boost_round : int\n number of round to finetune model\n verbose_eval : int\n verbose level\n \"\"\"\n # Based on existing model and finetune by train more rounds\n dtrain, _ = self._prepare_data(dataset, reweighter) # pylint: disable=W0632\n if dtrain.empty:\n raise ValueError(\"Empty data from dataset, please check your dataset config.\")\n verbose_eval_callback = lgb.log_evaluation(period=verbose_eval)\n self.model = lgb.train(\n self.params,\n dtrain,\n num_boost_round=num_boost_round,\n init_model=self.model,\n valid_sets=[dtrain],\n valid_names=[\"train\"],\n callbacks=[verbose_eval_callback],\n )\n",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
""" view.py: Contains the View class. """
import random
import config
from graphics import *
class View:
""" The view class which handles the visual component of the application.
"""
def __init__(self, pygame, master):
""" Set up and initialise the view. Does not start the display. """
self._pygame = pygame
self._master = master
self._display = self._pygame.display
self._interface = None
self._state = None
self._cycle_colour = (200, 0, 0)
self._white = (255, 255, 255)
def start(self):
""" Start the display. """
self._screen = self._display.set_mode((640, 480))
self._display.set_caption('PolyominOhs!')
self._pygame.mouse.set_visible(0)
def update(self):
""" Update the screen. """
# Constantly cycle through a colour
h, s, v = rgb2hsv(self._cycle_colour)
h += 1
self._cycle_colour = hsv2rgb((h, s, v))
if self._state == config.GS_LOADING:
self._screen.blit(self._background, (0, 0))
elif self._state in [config.GS_MENU, config.GS_MENU_ENTER_HIGHSCORE,
config.GS_MENU_HIGHSCORES, config.GS_MENU_HELP]:
# Get current selections
selected = self._interface.get_selection()
settings = {config.MENU_LEVEL: str(self._interface.get_level()),
config.MENU_ORDER: str(self._interface.get_order()),
config.MENU_SFX: self._interface.get_sfx(),
config.MENU_MUSIC: self._interface.get_music()}
# Background and title
self._screen.blit(self._background, (0, 0))
draw_text(self._screen, (120, 25), 'PolyominOhs!', 36,
self._cycle_colour, self._pygame, True)
# Buttons
for button in self._buttons.items():
if button[0] == selected:
button[1].draw(self._screen, config.TXT_HOVER,
self._pygame, self._cycle_colour)
else:
button[1].draw(self._screen, config.TXT_NORMAL,
self._pygame)
# Radio Selections
for radio in self._radios.items():
if radio[0] == selected:
radio[1].draw(self._screen, settings[radio[0]],
config.TXT_HOVER, self._cycle_colour,
self._pygame)
else:
radio[1].draw(self._screen, settings[radio[0]],
config.TXT_NORMAL, self._cycle_colour,
self._pygame)
# Random polyomino
order = self._interface.get_order()
ominoes = self._master._ominoes[order - 1]
n = self._interface.get_random_omino()
shape = ominoes[0][n]
draw_polyomino(self._screen, (400, 160), shape, 21,
self._cycle_colour, self._pygame)
# Highscores
if self._state == config.GS_MENU_HIGHSCORES:
draw_border(self._highscores, self._cycle_colour, self._pygame)
for i, highscore in enumerate(self._master.get_highscores()):
name, score = highscore
name = name.replace('_', ' ')
if self._interface.get_highscore_highlight() == i:
colour = self._cycle_colour
else:
colour = self._white
draw_text(self._highscores, (20, 10 + (i + 1) * 25), name,
10, colour, self._pygame)
draw_text(self._highscores, (175, 10 + (i + 1) * 25),
str(score), 10, colour, self._pygame)
self._screen.blit(self._highscores, (200, 100))
# Enter highscore
if self._state == config.GS_MENU_ENTER_HIGHSCORE:
self._enterhighscore.fill((0, 0, 0))
draw_border(self._enterhighscore, self._cycle_colour,
self._pygame)
draw_text(self._enterhighscore, (60, 20), 'Highscore!', 14,
self._white, self._pygame)
draw_text(self._enterhighscore, (20, 60),
'Please enter your name:', 10, self._white,
self._pygame)
draw_text(self._enterhighscore, (70, 170), 'Press return', 10,
self._white, self._pygame)
self._name_entry.update(self._interface.get_highscore_name())
self._name_entry.draw(self._enterhighscore,
self._interface.get_name_selected(),
self._cycle_colour, self._pygame)
self._screen.blit(self._enterhighscore, (200, 120))
# Help
if self._state == config.GS_MENU_HELP:
draw_border(self._help, self._cycle_colour, self._pygame)
self._screen.blit(self._help, (115, 120))
elif self._state in [config.GS_GAME, config.GS_GAME_PAUSED,
config.GS_GAME_OVER]:
# Get current information
score = str(self._interface.get_score())
lines = str(self._interface.get_lines_cleared())
next_omino = self._interface.get_next_omino()
self._screen.blit(self._background, (0, 0))
# Score and number of lines cleared
draw_text(self._screen, (445, 155), score, 10, self._white,
self._pygame)
draw_text(self._screen, (445, 215), lines, 10, self._white,
self._pygame)
# Draw next polyomino
if self._state == config.GS_GAME:
draw_polyomino(self._screen, (440, 290), next_omino.get_shape(0),
21, next_omino.get_colour(), self._pygame)
# Draw grid of blocks (or pause or game over screen)
grid = self._interface.get_field().get_complete_grid()
self._grid.fill((0, 0, 0))
draw_border(self._grid, self._cycle_colour, self._pygame)
if self._state == config.GS_GAME:
size = config.sizes[self._interface.get_order()]
draw_grid(self._grid, (5, 5), grid, size, self._pygame)
elif self._state == config.GS_GAME_PAUSED:
draw_text(self._grid, (30, 115), 'Game Paused', 14,
self._cycle_colour, self._pygame, True)
draw_text(self._grid, (40, 185), 'Press y to quit', 10,
self._white, self._pygame)
draw_text(self._grid, (30, 215), 'or esc to resume', 10,
self._white, self._pygame)
elif self._state == config.GS_GAME_OVER:
draw_text(self._grid, (42, 115), 'Game Over', 14,
self._cycle_colour, self._pygame, True)
draw_text(self._grid, (47, 185), 'Press return', 10,
self._white, self._pygame)
self._screen.blit(self._grid, (60, 30))
self._display.flip()
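        # --- editor's note (not part of the original source) ----------------
        # pygame's display.flip() updates the full window at once, so all of
        # the blits above only become visible here, at the end of update().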
def change_state(self, state, interface=None):
""" Change the state of the application and get the new interface
(if given). Set up graphics for the new state if required.
change_state(int, Menu/Game) -> void
"""
self._state = state
        if interface is not None:
self._interface = interface
if self._state == config.GS_LOADING:
# Background with loading text
self._background = self._pygame.Surface(self._screen.get_size())
self._background = self._background.convert()
self._background.fill((0, 0, 0))
draw_text(self._background, (180, 180), 'Loading...', 36,
self._white, self._pygame)
elif self._state == config.GS_GAME:
# Background with static text
self._background = self._pygame.Surface(self._screen.get_size())
self._background = self._background.convert()
self._background.fill((0, 0, 0))
draw_text(self._background, (410, 130), 'Score:', 10,
self._white, self._pygame)
draw_text(self._background, (410, 190), 'Lines Cleared:', 10,
self._white, self._pygame)
next_text = 'Next ' + \
config.names[self._interface.get_order()].title() + ':'
draw_text(self._background, (410, 250), next_text, 10,
self._white, self._pygame)
# Grid
w = 210 + 10 - self._interface.get_field().get_size()[0] + 1
h = 420 + 10 - self._interface.get_field().get_size()[1] + 1
self._grid = self._pygame.Surface((w, h))
self._grid = self._grid.convert()
self._grid.fill((0, 0, 0))
self._grid.set_colorkey((0, 0, 0))
elif self._state in [config.GS_MENU, config.GS_MENU_ENTER_HIGHSCORE,
config.GS_MENU_HIGHSCORES]:
# Background with static text
self._background = self._pygame.Surface(self._screen.get_size())
self._background = self._background.convert()
self._background.fill((0, 0, 0))
draw_text(self._background, (110, 300), 'Settings:', 10,
self._white, self._pygame)
draw_text(self._background, (130, 340), 'Difficulty Level:', 10,
self._white, self._pygame)
draw_text(self._background, (130, 400), 'Polyomino Order:', 10,
self._white, self._pygame)
draw_text(self._background, (370, 300), 'Audio:', 10,
self._white, self._pygame)
draw_text(self._background, (400, 340), 'Sound Effects:', 10,
self._white, self._pygame)
draw_text(self._background, (400, 400), 'Music:', 10,
self._white, self._pygame)
# Buttons
self._buttons = {}
start_game_button = Button('Start Game', 10, (90, 150))
self._buttons.update({config.MENU_START: start_game_button})
view_highscores_button = Button('View Highscores', 10, (90, 180))
self._buttons.update({config.MENU_HIGHSCORES: view_highscores_button})
help_button = Button('Help', 10, (90, 210))
self._buttons.update({config.MENU_HELP: help_button})
quit_button = Button('Quit', 10, (90, 240))
self._buttons.update({config.MENU_QUIT: quit_button})
# Radio Selections
self._radios = {}
level_selection = Radio_Selection([str(n + 1) for n in range(9)],
10, (160, 365))
self._radios.update({config.MENU_LEVEL: level_selection})
order_selection = Radio_Selection([str(n + 1) for n in range(6)],
10, (160, 425))
self._radios.update({config.MENU_ORDER: order_selection})
sfx_selection = Radio_Selection(['On', 'Off'], 10, (435, 365))
self._radios.update({config.MENU_SFX: sfx_selection})
music_selection = Radio_Selection(['On', 'Off'], 10, (435, 425))
self._radios.update({config.MENU_MUSIC: music_selection})
# Highscores Screen
self._highscores = self._pygame.Surface((250, 300))
self._highscores = self._highscores.convert()
self._highscores.fill((0, 0, 0))
draw_text(self._highscores, (15, 10), 'Highscores:', 10,
self._white, self._pygame)
# Enter highscore name screen
self._enterhighscore = self._pygame.Surface((250, 210))
self._enterhighscore = self._enterhighscore.convert()
self._enterhighscore.fill((0, 0, 0))
self._name_entry = Text_Entry(3, ['A', 'A', 'A'], 20, (85, 105))
# Help Screen
self._help = self._pygame.Surface((410, 240))
self._help = self._help.convert()
self._help.fill((0, 0, 0))
draw_text(self._help, (15, 10), 'Controls:', 10, self._white,
self._pygame)
draw_text(self._help, (205, 10), 'Instructions:', 10,
self._white, self._pygame)
draw_text(self._help, (20, 45), 'Up - Rotate', 10, self._white,
self._pygame)
draw_text(self._help, (20, 75), 'Left - Move Left', 10,
self._white, self._pygame)
draw_text(self._help, (20, 105), 'Right - Move Right', 10,
self._white, self._pygame)
draw_text(self._help, (20, 135), 'Down - Move Down', 10,
self._white, self._pygame)
draw_text(self._help, (20, 165), 'Space - Drop', 10, self._white,
self._pygame)
draw_text(self._help, (20, 195), 'Esc - Pause', 10, self._white,
self._pygame)
text = config.instructions
rect = self._pygame.Rect(0, 0, 190, 190)
instructions = render_textrect(text, 8, rect, self._white,
(0, 0, 0), 0, self._pygame)
self._help.blit(instructions, (210, 45))
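# --- editor's sketch (not part of the original source) ---------------------
# rgb2hsv/hsv2rgb come from the local `graphics` module, which is not shown.
# A plausible stand-in built on the standard library, assuming hue in degrees
# and 8-bit RGB channels, shows the hue-cycling trick update() relies on; the
# functions are named demo_* to avoid shadowing the real graphics helpers:
import colorsys

def demo_rgb2hsv(rgb):
    r, g, b = (c / 255.0 for c in rgb)
    h, s, v = colorsys.rgb_to_hsv(r, g, b)
    return h * 360.0, s, v

def demo_hsv2rgb(hsv):
    h, s, v = hsv
    r, g, b = colorsys.hsv_to_rgb((h % 360.0) / 360.0, s, v)
    return int(r * 255), int(g * 255), int(b * 255)

# Starting from the view's initial (200, 0, 0), each frame shifts hue by one:
colour = (200, 0, 0)
for _ in range(3):
    h, s, v = demo_rgb2hsv(colour)
    colour = demo_hsv2rgb((h + 1, s, v))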
|
normal
|
{
"blob_id": "2168d10a1b4796576cc7ebb6893e0dc8b58085ca",
"index": 4435,
"step-1": "<mask token>\n\n\nclass View:\n <mask token>\n\n def __init__(self, pygame, master):\n \"\"\" Set up and initialise the view. Does not start the display. \"\"\"\n self._pygame = pygame\n self._master = master\n self._display = self._pygame.display\n self._interface = None\n self._state = None\n self._cycle_colour = 200, 0, 0\n self._white = 255, 255, 255\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass View:\n <mask token>\n\n def __init__(self, pygame, master):\n \"\"\" Set up and initialise the view. Does not start the display. \"\"\"\n self._pygame = pygame\n self._master = master\n self._display = self._pygame.display\n self._interface = None\n self._state = None\n self._cycle_colour = 200, 0, 0\n self._white = 255, 255, 255\n\n def start(self):\n \"\"\" Start the display. \"\"\"\n self._screen = self._display.set_mode((640, 480))\n self._display.set_caption('PolyominOhs!')\n self._pygame.mouse.set_visible(0)\n\n def update(self):\n \"\"\" Update the screen. \"\"\"\n h, s, v = rgb2hsv(self._cycle_colour)\n h += 1\n self._cycle_colour = hsv2rgb((h, s, v))\n if self._state == config.GS_LOADING:\n self._screen.blit(self._background, (0, 0))\n elif self._state in [config.GS_MENU, config.GS_MENU_ENTER_HIGHSCORE,\n config.GS_MENU_HIGHSCORES, config.GS_MENU_HELP]:\n selected = self._interface.get_selection()\n settings = {config.MENU_LEVEL: str(self._interface.get_level()),\n config.MENU_ORDER: str(self._interface.get_order()), config\n .MENU_SFX: self._interface.get_sfx(), config.MENU_MUSIC:\n self._interface.get_music()}\n self._screen.blit(self._background, (0, 0))\n draw_text(self._screen, (120, 25), 'PolyominOhs!', 36, self.\n _cycle_colour, self._pygame, True)\n for button in self._buttons.items():\n if button[0] == selected:\n button[1].draw(self._screen, config.TXT_HOVER, self.\n _pygame, self._cycle_colour)\n else:\n button[1].draw(self._screen, config.TXT_NORMAL, self.\n _pygame)\n for radio in self._radios.items():\n if radio[0] == selected:\n radio[1].draw(self._screen, settings[radio[0]], config.\n TXT_HOVER, self._cycle_colour, self._pygame)\n else:\n radio[1].draw(self._screen, settings[radio[0]], config.\n TXT_NORMAL, self._cycle_colour, self._pygame)\n order = self._interface.get_order()\n ominoes = self._master._ominoes[order - 1]\n n = self._interface.get_random_omino()\n shape = ominoes[0][n]\n draw_polyomino(self._screen, (400, 160), shape, 21, self.\n _cycle_colour, self._pygame)\n if self._state == config.GS_MENU_HIGHSCORES:\n draw_border(self._highscores, self._cycle_colour, self._pygame)\n for i, highscore in enumerate(self._master.get_highscores()):\n name, score = highscore\n name = name.replace('_', ' ')\n if self._interface.get_highscore_highlight() == i:\n colour = self._cycle_colour\n else:\n colour = self._white\n draw_text(self._highscores, (20, 10 + (i + 1) * 25),\n name, 10, colour, self._pygame)\n draw_text(self._highscores, (175, 10 + (i + 1) * 25),\n str(score), 10, colour, self._pygame)\n self._screen.blit(self._highscores, (200, 100))\n if self._state == config.GS_MENU_ENTER_HIGHSCORE:\n self._enterhighscore.fill((0, 0, 0))\n draw_border(self._enterhighscore, self._cycle_colour, self.\n _pygame)\n draw_text(self._enterhighscore, (60, 20), 'Highscore!', 14,\n self._white, self._pygame)\n draw_text(self._enterhighscore, (20, 60),\n 'Please enter your name:', 10, self._white, self._pygame)\n draw_text(self._enterhighscore, (70, 170), 'Press return', \n 10, self._white, self._pygame)\n self._name_entry.update(self._interface.get_highscore_name())\n self._name_entry.draw(self._enterhighscore, self._interface\n .get_name_selected(), self._cycle_colour, self._pygame)\n self._screen.blit(self._enterhighscore, (200, 120))\n if self._state == config.GS_MENU_HELP:\n draw_border(self._help, self._cycle_colour, self._pygame)\n self._screen.blit(self._help, (115, 120))\n elif self._state in [config.GS_GAME, config.GS_GAME_PAUSED, 
config.\n GS_GAME_OVER]:\n score = str(self._interface.get_score())\n lines = str(self._interface.get_lines_cleared())\n next_omino = self._interface.get_next_omino()\n self._screen.blit(self._background, (0, 0))\n draw_text(self._screen, (445, 155), score, 10, self._white,\n self._pygame)\n draw_text(self._screen, (445, 215), lines, 10, self._white,\n self._pygame)\n if self._state == config.GS_GAME:\n draw_polyomino(self._screen, (440, 290), next_omino.\n get_shape(0), 21, next_omino.get_colour(), self._pygame)\n grid = self._interface.get_field().get_complete_grid()\n self._grid.fill((0, 0, 0))\n draw_border(self._grid, self._cycle_colour, self._pygame)\n if self._state == config.GS_GAME:\n size = config.sizes[self._interface.get_order()]\n draw_grid(self._grid, (5, 5), grid, size, self._pygame)\n elif self._state == config.GS_GAME_PAUSED:\n draw_text(self._grid, (30, 115), 'Game Paused', 14, self.\n _cycle_colour, self._pygame, True)\n draw_text(self._grid, (40, 185), 'Press y to quit', 10,\n self._white, self._pygame)\n draw_text(self._grid, (30, 215), 'or esc to resume', 10,\n self._white, self._pygame)\n elif self._state == config.GS_GAME_OVER:\n draw_text(self._grid, (42, 115), 'Game Over', 14, self.\n _cycle_colour, self._pygame, True)\n draw_text(self._grid, (47, 185), 'Press return', 10, self.\n _white, self._pygame)\n self._screen.blit(self._grid, (60, 30))\n self._display.flip()\n\n def change_state(self, state, interface=None):\n \"\"\" Change the state of the application and get the new interface\n (if given). Set up graphics for the new state if required.\n \n change_state(int, Menu/Game) -> void\n \"\"\"\n self._state = state\n if interface != None:\n self._interface = interface\n if self._state == config.GS_LOADING:\n self._background = self._pygame.Surface(self._screen.get_size())\n self._background = self._background.convert()\n self._background.fill((0, 0, 0))\n draw_text(self._background, (180, 180), 'Loading...', 36, self.\n _white, self._pygame)\n elif self._state == config.GS_GAME:\n self._background = self._pygame.Surface(self._screen.get_size())\n self._background = self._background.convert()\n self._background.fill((0, 0, 0))\n draw_text(self._background, (410, 130), 'Score:', 10, self.\n _white, self._pygame)\n draw_text(self._background, (410, 190), 'Lines Cleared:', 10,\n self._white, self._pygame)\n next_text = 'Next ' + config.names[self._interface.get_order()\n ].title() + ':'\n draw_text(self._background, (410, 250), next_text, 10, self.\n _white, self._pygame)\n w = 210 + 10 - self._interface.get_field().get_size()[0] + 1\n h = 420 + 10 - self._interface.get_field().get_size()[1] + 1\n self._grid = self._pygame.Surface((w, h))\n self._grid = self._grid.convert()\n self._grid.fill((0, 0, 0))\n self._grid.set_colorkey((0, 0, 0))\n elif self._state in [config.GS_MENU, config.GS_MENU_ENTER_HIGHSCORE,\n config.GS_MENU_HIGHSCORES]:\n self._background = self._pygame.Surface(self._screen.get_size())\n self._background = self._background.convert()\n self._background.fill((0, 0, 0))\n draw_text(self._background, (110, 300), 'Settings:', 10, self.\n _white, self._pygame)\n draw_text(self._background, (130, 340), 'Difficulty Level:', 10,\n self._white, self._pygame)\n draw_text(self._background, (130, 400), 'Polyomino Order:', 10,\n self._white, self._pygame)\n draw_text(self._background, (370, 300), 'Audio:', 10, self.\n _white, self._pygame)\n draw_text(self._background, (400, 340), 'Sound Effects:', 10,\n self._white, self._pygame)\n draw_text(self._background, 
(400, 400), 'Music:', 10, self.\n _white, self._pygame)\n self._buttons = {}\n start_game_button = Button('Start Game', 10, (90, 150))\n self._buttons.update({config.MENU_START: start_game_button})\n view_highscores_button = Button('View Highscores', 10, (90, 180))\n self._buttons.update({config.MENU_HIGHSCORES:\n view_highscores_button})\n help_button = Button('Help', 10, (90, 210))\n self._buttons.update({config.MENU_HELP: help_button})\n quit_button = Button('Quit', 10, (90, 240))\n self._buttons.update({config.MENU_QUIT: quit_button})\n self._radios = {}\n level_selection = Radio_Selection([str(n + 1) for n in range(9)\n ], 10, (160, 365))\n self._radios.update({config.MENU_LEVEL: level_selection})\n order_selection = Radio_Selection([str(n + 1) for n in range(6)\n ], 10, (160, 425))\n self._radios.update({config.MENU_ORDER: order_selection})\n sfx_selection = Radio_Selection(['On', 'Off'], 10, (435, 365))\n self._radios.update({config.MENU_SFX: sfx_selection})\n music_selection = Radio_Selection(['On', 'Off'], 10, (435, 425))\n self._radios.update({config.MENU_MUSIC: music_selection})\n self._highscores = self._pygame.Surface((250, 300))\n self._highscores = self._highscores.convert()\n self._highscores.fill((0, 0, 0))\n draw_text(self._highscores, (15, 10), 'Highscores:', 10, self.\n _white, self._pygame)\n self._enterhighscore = self._pygame.Surface((250, 210))\n self._enterhighscore = self._enterhighscore.convert()\n self._enterhighscore.fill((0, 0, 0))\n self._name_entry = Text_Entry(3, ['A', 'A', 'A'], 20, (85, 105))\n self._help = self._pygame.Surface((410, 240))\n self._help = self._help.convert()\n self._help.fill((0, 0, 0))\n draw_text(self._help, (15, 10), 'Controls:', 10, self._white,\n self._pygame)\n draw_text(self._help, (205, 10), 'Instructions:', 10, self.\n _white, self._pygame)\n draw_text(self._help, (20, 45), 'Up - Rotate', 10, self._white,\n self._pygame)\n draw_text(self._help, (20, 75), 'Left - Move Left', 10, self.\n _white, self._pygame)\n draw_text(self._help, (20, 105), 'Right - Move Right', 10, self\n ._white, self._pygame)\n draw_text(self._help, (20, 135), 'Down - Move Down', 10, self.\n _white, self._pygame)\n draw_text(self._help, (20, 165), 'Space - Drop', 10, self.\n _white, self._pygame)\n draw_text(self._help, (20, 195), 'Esc - Pause', 10, self._white,\n self._pygame)\n text = config.instructions\n rect = self._pygame.Rect(0, 0, 190, 190)\n instructions = render_textrect(text, 8, rect, self._white, (0, \n 0, 0), 0, self._pygame)\n self._help.blit(instructions, (210, 45))\n",
"step-3": "<mask token>\n\n\nclass View:\n \"\"\" The view class which handles the visual component of the application.\n \"\"\"\n\n def __init__(self, pygame, master):\n \"\"\" Set up and initialise the view. Does not start the display. \"\"\"\n self._pygame = pygame\n self._master = master\n self._display = self._pygame.display\n self._interface = None\n self._state = None\n self._cycle_colour = 200, 0, 0\n self._white = 255, 255, 255\n\n def start(self):\n \"\"\" Start the display. \"\"\"\n self._screen = self._display.set_mode((640, 480))\n self._display.set_caption('PolyominOhs!')\n self._pygame.mouse.set_visible(0)\n\n def update(self):\n \"\"\" Update the screen. \"\"\"\n h, s, v = rgb2hsv(self._cycle_colour)\n h += 1\n self._cycle_colour = hsv2rgb((h, s, v))\n if self._state == config.GS_LOADING:\n self._screen.blit(self._background, (0, 0))\n elif self._state in [config.GS_MENU, config.GS_MENU_ENTER_HIGHSCORE,\n config.GS_MENU_HIGHSCORES, config.GS_MENU_HELP]:\n selected = self._interface.get_selection()\n settings = {config.MENU_LEVEL: str(self._interface.get_level()),\n config.MENU_ORDER: str(self._interface.get_order()), config\n .MENU_SFX: self._interface.get_sfx(), config.MENU_MUSIC:\n self._interface.get_music()}\n self._screen.blit(self._background, (0, 0))\n draw_text(self._screen, (120, 25), 'PolyominOhs!', 36, self.\n _cycle_colour, self._pygame, True)\n for button in self._buttons.items():\n if button[0] == selected:\n button[1].draw(self._screen, config.TXT_HOVER, self.\n _pygame, self._cycle_colour)\n else:\n button[1].draw(self._screen, config.TXT_NORMAL, self.\n _pygame)\n for radio in self._radios.items():\n if radio[0] == selected:\n radio[1].draw(self._screen, settings[radio[0]], config.\n TXT_HOVER, self._cycle_colour, self._pygame)\n else:\n radio[1].draw(self._screen, settings[radio[0]], config.\n TXT_NORMAL, self._cycle_colour, self._pygame)\n order = self._interface.get_order()\n ominoes = self._master._ominoes[order - 1]\n n = self._interface.get_random_omino()\n shape = ominoes[0][n]\n draw_polyomino(self._screen, (400, 160), shape, 21, self.\n _cycle_colour, self._pygame)\n if self._state == config.GS_MENU_HIGHSCORES:\n draw_border(self._highscores, self._cycle_colour, self._pygame)\n for i, highscore in enumerate(self._master.get_highscores()):\n name, score = highscore\n name = name.replace('_', ' ')\n if self._interface.get_highscore_highlight() == i:\n colour = self._cycle_colour\n else:\n colour = self._white\n draw_text(self._highscores, (20, 10 + (i + 1) * 25),\n name, 10, colour, self._pygame)\n draw_text(self._highscores, (175, 10 + (i + 1) * 25),\n str(score), 10, colour, self._pygame)\n self._screen.blit(self._highscores, (200, 100))\n if self._state == config.GS_MENU_ENTER_HIGHSCORE:\n self._enterhighscore.fill((0, 0, 0))\n draw_border(self._enterhighscore, self._cycle_colour, self.\n _pygame)\n draw_text(self._enterhighscore, (60, 20), 'Highscore!', 14,\n self._white, self._pygame)\n draw_text(self._enterhighscore, (20, 60),\n 'Please enter your name:', 10, self._white, self._pygame)\n draw_text(self._enterhighscore, (70, 170), 'Press return', \n 10, self._white, self._pygame)\n self._name_entry.update(self._interface.get_highscore_name())\n self._name_entry.draw(self._enterhighscore, self._interface\n .get_name_selected(), self._cycle_colour, self._pygame)\n self._screen.blit(self._enterhighscore, (200, 120))\n if self._state == config.GS_MENU_HELP:\n draw_border(self._help, self._cycle_colour, self._pygame)\n self._screen.blit(self._help, 
(115, 120))\n elif self._state in [config.GS_GAME, config.GS_GAME_PAUSED, config.\n GS_GAME_OVER]:\n score = str(self._interface.get_score())\n lines = str(self._interface.get_lines_cleared())\n next_omino = self._interface.get_next_omino()\n self._screen.blit(self._background, (0, 0))\n draw_text(self._screen, (445, 155), score, 10, self._white,\n self._pygame)\n draw_text(self._screen, (445, 215), lines, 10, self._white,\n self._pygame)\n if self._state == config.GS_GAME:\n draw_polyomino(self._screen, (440, 290), next_omino.\n get_shape(0), 21, next_omino.get_colour(), self._pygame)\n grid = self._interface.get_field().get_complete_grid()\n self._grid.fill((0, 0, 0))\n draw_border(self._grid, self._cycle_colour, self._pygame)\n if self._state == config.GS_GAME:\n size = config.sizes[self._interface.get_order()]\n draw_grid(self._grid, (5, 5), grid, size, self._pygame)\n elif self._state == config.GS_GAME_PAUSED:\n draw_text(self._grid, (30, 115), 'Game Paused', 14, self.\n _cycle_colour, self._pygame, True)\n draw_text(self._grid, (40, 185), 'Press y to quit', 10,\n self._white, self._pygame)\n draw_text(self._grid, (30, 215), 'or esc to resume', 10,\n self._white, self._pygame)\n elif self._state == config.GS_GAME_OVER:\n draw_text(self._grid, (42, 115), 'Game Over', 14, self.\n _cycle_colour, self._pygame, True)\n draw_text(self._grid, (47, 185), 'Press return', 10, self.\n _white, self._pygame)\n self._screen.blit(self._grid, (60, 30))\n self._display.flip()\n\n def change_state(self, state, interface=None):\n \"\"\" Change the state of the application and get the new interface\n (if given). Set up graphics for the new state if required.\n \n change_state(int, Menu/Game) -> void\n \"\"\"\n self._state = state\n if interface != None:\n self._interface = interface\n if self._state == config.GS_LOADING:\n self._background = self._pygame.Surface(self._screen.get_size())\n self._background = self._background.convert()\n self._background.fill((0, 0, 0))\n draw_text(self._background, (180, 180), 'Loading...', 36, self.\n _white, self._pygame)\n elif self._state == config.GS_GAME:\n self._background = self._pygame.Surface(self._screen.get_size())\n self._background = self._background.convert()\n self._background.fill((0, 0, 0))\n draw_text(self._background, (410, 130), 'Score:', 10, self.\n _white, self._pygame)\n draw_text(self._background, (410, 190), 'Lines Cleared:', 10,\n self._white, self._pygame)\n next_text = 'Next ' + config.names[self._interface.get_order()\n ].title() + ':'\n draw_text(self._background, (410, 250), next_text, 10, self.\n _white, self._pygame)\n w = 210 + 10 - self._interface.get_field().get_size()[0] + 1\n h = 420 + 10 - self._interface.get_field().get_size()[1] + 1\n self._grid = self._pygame.Surface((w, h))\n self._grid = self._grid.convert()\n self._grid.fill((0, 0, 0))\n self._grid.set_colorkey((0, 0, 0))\n elif self._state in [config.GS_MENU, config.GS_MENU_ENTER_HIGHSCORE,\n config.GS_MENU_HIGHSCORES]:\n self._background = self._pygame.Surface(self._screen.get_size())\n self._background = self._background.convert()\n self._background.fill((0, 0, 0))\n draw_text(self._background, (110, 300), 'Settings:', 10, self.\n _white, self._pygame)\n draw_text(self._background, (130, 340), 'Difficulty Level:', 10,\n self._white, self._pygame)\n draw_text(self._background, (130, 400), 'Polyomino Order:', 10,\n self._white, self._pygame)\n draw_text(self._background, (370, 300), 'Audio:', 10, self.\n _white, self._pygame)\n draw_text(self._background, (400, 340), 'Sound 
Effects:', 10,\n self._white, self._pygame)\n draw_text(self._background, (400, 400), 'Music:', 10, self.\n _white, self._pygame)\n self._buttons = {}\n start_game_button = Button('Start Game', 10, (90, 150))\n self._buttons.update({config.MENU_START: start_game_button})\n view_highscores_button = Button('View Highscores', 10, (90, 180))\n self._buttons.update({config.MENU_HIGHSCORES:\n view_highscores_button})\n help_button = Button('Help', 10, (90, 210))\n self._buttons.update({config.MENU_HELP: help_button})\n quit_button = Button('Quit', 10, (90, 240))\n self._buttons.update({config.MENU_QUIT: quit_button})\n self._radios = {}\n level_selection = Radio_Selection([str(n + 1) for n in range(9)\n ], 10, (160, 365))\n self._radios.update({config.MENU_LEVEL: level_selection})\n order_selection = Radio_Selection([str(n + 1) for n in range(6)\n ], 10, (160, 425))\n self._radios.update({config.MENU_ORDER: order_selection})\n sfx_selection = Radio_Selection(['On', 'Off'], 10, (435, 365))\n self._radios.update({config.MENU_SFX: sfx_selection})\n music_selection = Radio_Selection(['On', 'Off'], 10, (435, 425))\n self._radios.update({config.MENU_MUSIC: music_selection})\n self._highscores = self._pygame.Surface((250, 300))\n self._highscores = self._highscores.convert()\n self._highscores.fill((0, 0, 0))\n draw_text(self._highscores, (15, 10), 'Highscores:', 10, self.\n _white, self._pygame)\n self._enterhighscore = self._pygame.Surface((250, 210))\n self._enterhighscore = self._enterhighscore.convert()\n self._enterhighscore.fill((0, 0, 0))\n self._name_entry = Text_Entry(3, ['A', 'A', 'A'], 20, (85, 105))\n self._help = self._pygame.Surface((410, 240))\n self._help = self._help.convert()\n self._help.fill((0, 0, 0))\n draw_text(self._help, (15, 10), 'Controls:', 10, self._white,\n self._pygame)\n draw_text(self._help, (205, 10), 'Instructions:', 10, self.\n _white, self._pygame)\n draw_text(self._help, (20, 45), 'Up - Rotate', 10, self._white,\n self._pygame)\n draw_text(self._help, (20, 75), 'Left - Move Left', 10, self.\n _white, self._pygame)\n draw_text(self._help, (20, 105), 'Right - Move Right', 10, self\n ._white, self._pygame)\n draw_text(self._help, (20, 135), 'Down - Move Down', 10, self.\n _white, self._pygame)\n draw_text(self._help, (20, 165), 'Space - Drop', 10, self.\n _white, self._pygame)\n draw_text(self._help, (20, 195), 'Esc - Pause', 10, self._white,\n self._pygame)\n text = config.instructions\n rect = self._pygame.Rect(0, 0, 190, 190)\n instructions = render_textrect(text, 8, rect, self._white, (0, \n 0, 0), 0, self._pygame)\n self._help.blit(instructions, (210, 45))\n",
"step-4": "<mask token>\nimport random\nimport config\nfrom graphics import *\n\n\nclass View:\n \"\"\" The view class which handles the visual component of the application.\n \"\"\"\n\n def __init__(self, pygame, master):\n \"\"\" Set up and initialise the view. Does not start the display. \"\"\"\n self._pygame = pygame\n self._master = master\n self._display = self._pygame.display\n self._interface = None\n self._state = None\n self._cycle_colour = 200, 0, 0\n self._white = 255, 255, 255\n\n def start(self):\n \"\"\" Start the display. \"\"\"\n self._screen = self._display.set_mode((640, 480))\n self._display.set_caption('PolyominOhs!')\n self._pygame.mouse.set_visible(0)\n\n def update(self):\n \"\"\" Update the screen. \"\"\"\n h, s, v = rgb2hsv(self._cycle_colour)\n h += 1\n self._cycle_colour = hsv2rgb((h, s, v))\n if self._state == config.GS_LOADING:\n self._screen.blit(self._background, (0, 0))\n elif self._state in [config.GS_MENU, config.GS_MENU_ENTER_HIGHSCORE,\n config.GS_MENU_HIGHSCORES, config.GS_MENU_HELP]:\n selected = self._interface.get_selection()\n settings = {config.MENU_LEVEL: str(self._interface.get_level()),\n config.MENU_ORDER: str(self._interface.get_order()), config\n .MENU_SFX: self._interface.get_sfx(), config.MENU_MUSIC:\n self._interface.get_music()}\n self._screen.blit(self._background, (0, 0))\n draw_text(self._screen, (120, 25), 'PolyominOhs!', 36, self.\n _cycle_colour, self._pygame, True)\n for button in self._buttons.items():\n if button[0] == selected:\n button[1].draw(self._screen, config.TXT_HOVER, self.\n _pygame, self._cycle_colour)\n else:\n button[1].draw(self._screen, config.TXT_NORMAL, self.\n _pygame)\n for radio in self._radios.items():\n if radio[0] == selected:\n radio[1].draw(self._screen, settings[radio[0]], config.\n TXT_HOVER, self._cycle_colour, self._pygame)\n else:\n radio[1].draw(self._screen, settings[radio[0]], config.\n TXT_NORMAL, self._cycle_colour, self._pygame)\n order = self._interface.get_order()\n ominoes = self._master._ominoes[order - 1]\n n = self._interface.get_random_omino()\n shape = ominoes[0][n]\n draw_polyomino(self._screen, (400, 160), shape, 21, self.\n _cycle_colour, self._pygame)\n if self._state == config.GS_MENU_HIGHSCORES:\n draw_border(self._highscores, self._cycle_colour, self._pygame)\n for i, highscore in enumerate(self._master.get_highscores()):\n name, score = highscore\n name = name.replace('_', ' ')\n if self._interface.get_highscore_highlight() == i:\n colour = self._cycle_colour\n else:\n colour = self._white\n draw_text(self._highscores, (20, 10 + (i + 1) * 25),\n name, 10, colour, self._pygame)\n draw_text(self._highscores, (175, 10 + (i + 1) * 25),\n str(score), 10, colour, self._pygame)\n self._screen.blit(self._highscores, (200, 100))\n if self._state == config.GS_MENU_ENTER_HIGHSCORE:\n self._enterhighscore.fill((0, 0, 0))\n draw_border(self._enterhighscore, self._cycle_colour, self.\n _pygame)\n draw_text(self._enterhighscore, (60, 20), 'Highscore!', 14,\n self._white, self._pygame)\n draw_text(self._enterhighscore, (20, 60),\n 'Please enter your name:', 10, self._white, self._pygame)\n draw_text(self._enterhighscore, (70, 170), 'Press return', \n 10, self._white, self._pygame)\n self._name_entry.update(self._interface.get_highscore_name())\n self._name_entry.draw(self._enterhighscore, self._interface\n .get_name_selected(), self._cycle_colour, self._pygame)\n self._screen.blit(self._enterhighscore, (200, 120))\n if self._state == config.GS_MENU_HELP:\n draw_border(self._help, 
self._cycle_colour, self._pygame)\n self._screen.blit(self._help, (115, 120))\n elif self._state in [config.GS_GAME, config.GS_GAME_PAUSED, config.\n GS_GAME_OVER]:\n score = str(self._interface.get_score())\n lines = str(self._interface.get_lines_cleared())\n next_omino = self._interface.get_next_omino()\n self._screen.blit(self._background, (0, 0))\n draw_text(self._screen, (445, 155), score, 10, self._white,\n self._pygame)\n draw_text(self._screen, (445, 215), lines, 10, self._white,\n self._pygame)\n if self._state == config.GS_GAME:\n draw_polyomino(self._screen, (440, 290), next_omino.\n get_shape(0), 21, next_omino.get_colour(), self._pygame)\n grid = self._interface.get_field().get_complete_grid()\n self._grid.fill((0, 0, 0))\n draw_border(self._grid, self._cycle_colour, self._pygame)\n if self._state == config.GS_GAME:\n size = config.sizes[self._interface.get_order()]\n draw_grid(self._grid, (5, 5), grid, size, self._pygame)\n elif self._state == config.GS_GAME_PAUSED:\n draw_text(self._grid, (30, 115), 'Game Paused', 14, self.\n _cycle_colour, self._pygame, True)\n draw_text(self._grid, (40, 185), 'Press y to quit', 10,\n self._white, self._pygame)\n draw_text(self._grid, (30, 215), 'or esc to resume', 10,\n self._white, self._pygame)\n elif self._state == config.GS_GAME_OVER:\n draw_text(self._grid, (42, 115), 'Game Over', 14, self.\n _cycle_colour, self._pygame, True)\n draw_text(self._grid, (47, 185), 'Press return', 10, self.\n _white, self._pygame)\n self._screen.blit(self._grid, (60, 30))\n self._display.flip()\n\n def change_state(self, state, interface=None):\n \"\"\" Change the state of the application and get the new interface\n (if given). Set up graphics for the new state if required.\n \n change_state(int, Menu/Game) -> void\n \"\"\"\n self._state = state\n if interface != None:\n self._interface = interface\n if self._state == config.GS_LOADING:\n self._background = self._pygame.Surface(self._screen.get_size())\n self._background = self._background.convert()\n self._background.fill((0, 0, 0))\n draw_text(self._background, (180, 180), 'Loading...', 36, self.\n _white, self._pygame)\n elif self._state == config.GS_GAME:\n self._background = self._pygame.Surface(self._screen.get_size())\n self._background = self._background.convert()\n self._background.fill((0, 0, 0))\n draw_text(self._background, (410, 130), 'Score:', 10, self.\n _white, self._pygame)\n draw_text(self._background, (410, 190), 'Lines Cleared:', 10,\n self._white, self._pygame)\n next_text = 'Next ' + config.names[self._interface.get_order()\n ].title() + ':'\n draw_text(self._background, (410, 250), next_text, 10, self.\n _white, self._pygame)\n w = 210 + 10 - self._interface.get_field().get_size()[0] + 1\n h = 420 + 10 - self._interface.get_field().get_size()[1] + 1\n self._grid = self._pygame.Surface((w, h))\n self._grid = self._grid.convert()\n self._grid.fill((0, 0, 0))\n self._grid.set_colorkey((0, 0, 0))\n elif self._state in [config.GS_MENU, config.GS_MENU_ENTER_HIGHSCORE,\n config.GS_MENU_HIGHSCORES]:\n self._background = self._pygame.Surface(self._screen.get_size())\n self._background = self._background.convert()\n self._background.fill((0, 0, 0))\n draw_text(self._background, (110, 300), 'Settings:', 10, self.\n _white, self._pygame)\n draw_text(self._background, (130, 340), 'Difficulty Level:', 10,\n self._white, self._pygame)\n draw_text(self._background, (130, 400), 'Polyomino Order:', 10,\n self._white, self._pygame)\n draw_text(self._background, (370, 300), 'Audio:', 10, self.\n 
_white, self._pygame)\n draw_text(self._background, (400, 340), 'Sound Effects:', 10,\n self._white, self._pygame)\n draw_text(self._background, (400, 400), 'Music:', 10, self.\n _white, self._pygame)\n self._buttons = {}\n start_game_button = Button('Start Game', 10, (90, 150))\n self._buttons.update({config.MENU_START: start_game_button})\n view_highscores_button = Button('View Highscores', 10, (90, 180))\n self._buttons.update({config.MENU_HIGHSCORES:\n view_highscores_button})\n help_button = Button('Help', 10, (90, 210))\n self._buttons.update({config.MENU_HELP: help_button})\n quit_button = Button('Quit', 10, (90, 240))\n self._buttons.update({config.MENU_QUIT: quit_button})\n self._radios = {}\n level_selection = Radio_Selection([str(n + 1) for n in range(9)\n ], 10, (160, 365))\n self._radios.update({config.MENU_LEVEL: level_selection})\n order_selection = Radio_Selection([str(n + 1) for n in range(6)\n ], 10, (160, 425))\n self._radios.update({config.MENU_ORDER: order_selection})\n sfx_selection = Radio_Selection(['On', 'Off'], 10, (435, 365))\n self._radios.update({config.MENU_SFX: sfx_selection})\n music_selection = Radio_Selection(['On', 'Off'], 10, (435, 425))\n self._radios.update({config.MENU_MUSIC: music_selection})\n self._highscores = self._pygame.Surface((250, 300))\n self._highscores = self._highscores.convert()\n self._highscores.fill((0, 0, 0))\n draw_text(self._highscores, (15, 10), 'Highscores:', 10, self.\n _white, self._pygame)\n self._enterhighscore = self._pygame.Surface((250, 210))\n self._enterhighscore = self._enterhighscore.convert()\n self._enterhighscore.fill((0, 0, 0))\n self._name_entry = Text_Entry(3, ['A', 'A', 'A'], 20, (85, 105))\n self._help = self._pygame.Surface((410, 240))\n self._help = self._help.convert()\n self._help.fill((0, 0, 0))\n draw_text(self._help, (15, 10), 'Controls:', 10, self._white,\n self._pygame)\n draw_text(self._help, (205, 10), 'Instructions:', 10, self.\n _white, self._pygame)\n draw_text(self._help, (20, 45), 'Up - Rotate', 10, self._white,\n self._pygame)\n draw_text(self._help, (20, 75), 'Left - Move Left', 10, self.\n _white, self._pygame)\n draw_text(self._help, (20, 105), 'Right - Move Right', 10, self\n ._white, self._pygame)\n draw_text(self._help, (20, 135), 'Down - Move Down', 10, self.\n _white, self._pygame)\n draw_text(self._help, (20, 165), 'Space - Drop', 10, self.\n _white, self._pygame)\n draw_text(self._help, (20, 195), 'Esc - Pause', 10, self._white,\n self._pygame)\n text = config.instructions\n rect = self._pygame.Rect(0, 0, 190, 190)\n instructions = render_textrect(text, 8, rect, self._white, (0, \n 0, 0), 0, self._pygame)\n self._help.blit(instructions, (210, 45))\n",
"step-5": "\n\"\"\" view.py: Contains the View class. \"\"\"\n\n\nimport random\n\nimport config\nfrom graphics import *\n\n\nclass View:\n \n \"\"\" The view class which handles the visual component of the application.\n \"\"\"\n \n def __init__(self, pygame, master):\n \"\"\" Set up and initialise the view. Does not start the display. \"\"\"\n \n self._pygame = pygame\n self._master = master\n self._display = self._pygame.display\n self._interface = None\n self._state = None\n self._cycle_colour = (200, 0, 0)\n self._white = (255, 255, 255)\n \n def start(self):\n \"\"\" Start the display. \"\"\"\n \n self._screen = self._display.set_mode((640, 480))\n self._display.set_caption('PolyominOhs!')\n self._pygame.mouse.set_visible(0)\n \n def update(self):\n \"\"\" Update the screen. \"\"\"\n \n # Constantly cycle through a colour\n h, s, v = rgb2hsv(self._cycle_colour)\n h += 1\n self._cycle_colour = hsv2rgb((h, s, v))\n \n if self._state == config.GS_LOADING:\n self._screen.blit(self._background, (0, 0))\n elif self._state in [config.GS_MENU, config.GS_MENU_ENTER_HIGHSCORE,\n config.GS_MENU_HIGHSCORES, config.GS_MENU_HELP]:\n \n # Get current selections\n selected = self._interface.get_selection()\n settings = {config.MENU_LEVEL: str(self._interface.get_level()),\n config.MENU_ORDER: str(self._interface.get_order()),\n config.MENU_SFX: self._interface.get_sfx(),\n config.MENU_MUSIC: self._interface.get_music()}\n \n # Background and title\n self._screen.blit(self._background, (0, 0))\n draw_text(self._screen, (120, 25), 'PolyominOhs!', 36,\n self._cycle_colour, self._pygame, True)\n \n # Buttons\n for button in self._buttons.items():\n if button[0] == selected:\n button[1].draw(self._screen, config.TXT_HOVER,\n self._pygame, self._cycle_colour)\n else:\n button[1].draw(self._screen, config.TXT_NORMAL,\n self._pygame)\n \n # Radio Selections\n for radio in self._radios.items():\n if radio[0] == selected:\n radio[1].draw(self._screen, settings[radio[0]],\n config.TXT_HOVER, self._cycle_colour,\n self._pygame)\n else:\n radio[1].draw(self._screen, settings[radio[0]],\n config.TXT_NORMAL, self._cycle_colour,\n self._pygame)\n \n # Random polyomino\n order = self._interface.get_order()\n ominoes = self._master._ominoes[order - 1]\n n = self._interface.get_random_omino()\n shape = ominoes[0][n]\n draw_polyomino(self._screen, (400, 160), shape, 21,\n self._cycle_colour, self._pygame)\n \n # Highscores\n if self._state == config.GS_MENU_HIGHSCORES:\n draw_border(self._highscores, self._cycle_colour, self._pygame)\n for i, highscore in enumerate(self._master.get_highscores()):\n name, score = highscore\n name = name.replace('_', ' ')\n if self._interface.get_highscore_highlight() == i:\n colour = self._cycle_colour\n else:\n colour = self._white\n draw_text(self._highscores, (20, 10 + (i + 1) * 25), name,\n 10, colour, self._pygame)\n draw_text(self._highscores, (175, 10 + (i + 1) * 25),\n str(score), 10, colour, self._pygame)\n self._screen.blit(self._highscores, (200, 100))\n \n # Enter highscore\n if self._state == config.GS_MENU_ENTER_HIGHSCORE:\n self._enterhighscore.fill((0, 0, 0))\n draw_border(self._enterhighscore, self._cycle_colour,\n self._pygame)\n draw_text(self._enterhighscore, (60, 20), 'Highscore!', 14,\n self._white, self._pygame)\n draw_text(self._enterhighscore, (20, 60),\n 'Please enter your name:', 10, self._white,\n self._pygame)\n draw_text(self._enterhighscore, (70, 170), 'Press return', 10,\n self._white, self._pygame)\n 
self._name_entry.update(self._interface.get_highscore_name())\n self._name_entry.draw(self._enterhighscore,\n self._interface.get_name_selected(),\n self._cycle_colour, self._pygame)\n self._screen.blit(self._enterhighscore, (200, 120))\n \n # Help\n if self._state == config.GS_MENU_HELP:\n draw_border(self._help, self._cycle_colour, self._pygame)\n self._screen.blit(self._help, (115, 120))\n \n elif self._state in [config.GS_GAME, config.GS_GAME_PAUSED,\n config.GS_GAME_OVER]:\n \n # Get current information\n score = str(self._interface.get_score())\n lines = str(self._interface.get_lines_cleared())\n next_omino = self._interface.get_next_omino()\n \n self._screen.blit(self._background, (0, 0))\n \n # Score and number of lines cleared\n draw_text(self._screen, (445, 155), score, 10, self._white,\n self._pygame)\n draw_text(self._screen, (445, 215), lines, 10, self._white,\n self._pygame)\n \n # Draw next polyomino\n if self._state == config.GS_GAME:\n draw_polyomino(self._screen, (440, 290), next_omino.get_shape(0),\n 21, next_omino.get_colour(), self._pygame)\n \n # Draw grid of blocks (or pause or game over screen)\n grid = self._interface.get_field().get_complete_grid()\n self._grid.fill((0, 0, 0))\n draw_border(self._grid, self._cycle_colour, self._pygame)\n \n if self._state == config.GS_GAME:\n size = config.sizes[self._interface.get_order()]\n draw_grid(self._grid, (5, 5), grid, size, self._pygame)\n elif self._state == config.GS_GAME_PAUSED:\n draw_text(self._grid, (30, 115), 'Game Paused', 14,\n self._cycle_colour, self._pygame, True)\n draw_text(self._grid, (40, 185), 'Press y to quit', 10,\n self._white, self._pygame)\n draw_text(self._grid, (30, 215), 'or esc to resume', 10,\n self._white, self._pygame)\n elif self._state == config.GS_GAME_OVER:\n draw_text(self._grid, (42, 115), 'Game Over', 14,\n self._cycle_colour, self._pygame, True)\n draw_text(self._grid, (47, 185), 'Press return', 10,\n self._white, self._pygame)\n \n self._screen.blit(self._grid, (60, 30))\n \n self._display.flip()\n \n def change_state(self, state, interface=None):\n \"\"\" Change the state of the application and get the new interface\n (if given). 
Set up graphics for the new state if required.\n \n change_state(int, Menu/Game) -> void\n \"\"\"\n \n self._state = state\n if interface != None:\n self._interface = interface\n \n if self._state == config.GS_LOADING:\n \n # Background with loading text\n self._background = self._pygame.Surface(self._screen.get_size())\n self._background = self._background.convert()\n self._background.fill((0, 0, 0))\n draw_text(self._background, (180, 180), 'Loading...', 36,\n self._white, self._pygame)\n \n elif self._state == config.GS_GAME:\n \n # Background with static text\n self._background = self._pygame.Surface(self._screen.get_size())\n self._background = self._background.convert()\n self._background.fill((0, 0, 0))\n \n draw_text(self._background, (410, 130), 'Score:', 10,\n self._white, self._pygame)\n draw_text(self._background, (410, 190), 'Lines Cleared:', 10,\n self._white, self._pygame)\n \n next_text = 'Next ' + \\\n config.names[self._interface.get_order()].title() + ':'\n draw_text(self._background, (410, 250), next_text, 10,\n self._white, self._pygame)\n \n # Grid\n w = 210 + 10 - self._interface.get_field().get_size()[0] + 1\n h = 420 + 10 - self._interface.get_field().get_size()[1] + 1\n self._grid = self._pygame.Surface((w, h))\n self._grid = self._grid.convert()\n self._grid.fill((0, 0, 0))\n self._grid.set_colorkey((0, 0, 0))\n \n elif self._state in [config.GS_MENU, config.GS_MENU_ENTER_HIGHSCORE,\n config.GS_MENU_HIGHSCORES]:\n \n # Background with static text\n self._background = self._pygame.Surface(self._screen.get_size())\n self._background = self._background.convert()\n self._background.fill((0, 0, 0))\n \n draw_text(self._background, (110, 300), 'Settings:', 10,\n self._white, self._pygame)\n draw_text(self._background, (130, 340), 'Difficulty Level:', 10,\n self._white, self._pygame)\n draw_text(self._background, (130, 400), 'Polyomino Order:', 10,\n self._white, self._pygame)\n \n draw_text(self._background, (370, 300), 'Audio:', 10,\n self._white, self._pygame)\n draw_text(self._background, (400, 340), 'Sound Effects:', 10,\n self._white, self._pygame)\n draw_text(self._background, (400, 400), 'Music:', 10,\n self._white, self._pygame)\n \n # Buttons\n self._buttons = {}\n start_game_button = Button('Start Game', 10, (90, 150))\n self._buttons.update({config.MENU_START: start_game_button})\n view_highscores_button = Button('View Highscores', 10, (90, 180))\n self._buttons.update({config.MENU_HIGHSCORES: view_highscores_button})\n help_button = Button('Help', 10, (90, 210))\n self._buttons.update({config.MENU_HELP: help_button})\n quit_button = Button('Quit', 10, (90, 240))\n self._buttons.update({config.MENU_QUIT: quit_button})\n \n # Radio Selections\n self._radios = {}\n level_selection = Radio_Selection([str(n + 1) for n in range(9)],\n 10, (160, 365))\n self._radios.update({config.MENU_LEVEL: level_selection})\n order_selection = Radio_Selection([str(n + 1) for n in range(6)],\n 10, (160, 425))\n self._radios.update({config.MENU_ORDER: order_selection})\n sfx_selection = Radio_Selection(['On', 'Off'], 10, (435, 365))\n self._radios.update({config.MENU_SFX: sfx_selection})\n music_selection = Radio_Selection(['On', 'Off'], 10, (435, 425))\n self._radios.update({config.MENU_MUSIC: music_selection})\n \n # Highscores Screen\n self._highscores = self._pygame.Surface((250, 300))\n self._highscores = self._highscores.convert()\n self._highscores.fill((0, 0, 0))\n \n draw_text(self._highscores, (15, 10), 'Highscores:', 10,\n self._white, self._pygame)\n \n # Enter 
highscore name screen\n self._enterhighscore = self._pygame.Surface((250, 210))\n self._enterhighscore = self._enterhighscore.convert()\n self._enterhighscore.fill((0, 0, 0))\n self._name_entry = Text_Entry(3, ['A', 'A', 'A'], 20, (85, 105))\n \n # Help Screen\n self._help = self._pygame.Surface((410, 240))\n self._help = self._help.convert()\n self._help.fill((0, 0, 0))\n \n draw_text(self._help, (15, 10), 'Controls:', 10, self._white,\n self._pygame)\n draw_text(self._help, (205, 10), 'Instructions:', 10,\n self._white, self._pygame)\n \n draw_text(self._help, (20, 45), 'Up - Rotate', 10, self._white,\n self._pygame)\n draw_text(self._help, (20, 75), 'Left - Move Left', 10,\n self._white, self._pygame)\n draw_text(self._help, (20, 105), 'Right - Move Right', 10,\n self._white, self._pygame)\n draw_text(self._help, (20, 135), 'Down - Move Down', 10,\n self._white, self._pygame)\n draw_text(self._help, (20, 165), 'Space - Drop', 10, self._white,\n self._pygame)\n draw_text(self._help, (20, 195), 'Esc - Pause', 10, self._white,\n self._pygame)\n \n text = config.instructions\n rect = self._pygame.Rect(0, 0, 190, 190)\n instructions = render_textrect(text, 8, rect, self._white,\n (0, 0, 0), 0, self._pygame)\n self._help.blit(instructions, (210, 45))\n ",
"step-ids": [
2,
5,
6,
7,
8
]
}
|
[
2,
5,
6,
7,
8
] |
<|reserved_special_token_0|>
def car(env):
while True:
print('The car will start parking at: ', env.now)
parking_timeout = 5
yield env.timeout(parking_timeout)
print('The car will start driving at: ', env.now)
driving_timeout = 2
yield env.timeout(driving_timeout)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def car(env):
while True:
print('The car will start parking at: ', env.now)
parking_timeout = 5
yield env.timeout(parking_timeout)
print('The car will start driving at: ', env.now)
driving_timeout = 2
yield env.timeout(driving_timeout)
<|reserved_special_token_0|>
env.process(car(env))
env.run(until=20)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def car(env):
while True:
print('The car will start parking at: ', env.now)
parking_timeout = 5
yield env.timeout(parking_timeout)
print('The car will start driving at: ', env.now)
driving_timeout = 2
yield env.timeout(driving_timeout)
env = simpy.Environment()
env.process(car(env))
env.run(until=20)
<|reserved_special_token_1|>
import simpy
def car(env):
while True:
print('The car will start parking at: ', env.now)
parking_timeout = 5
yield env.timeout(parking_timeout)
print('The car will start driving at: ', env.now)
driving_timeout = 2
yield env.timeout(driving_timeout)
env = simpy.Environment()
env.process(car(env))
env.run(until=20)
<|reserved_special_token_1|>
#processes are described by generator functions
#during the lifetime of a process, the process function (a generator function)
#creates events and yields them
#when a process yields an event, it gets suspended
#SimPy resumes the process when the event is triggered
#multiple processes waiting on the same event are resumed in the same order
#they yielded the event (see the short sketch after this example)
import simpy
def car(env):
# i = 0
# while i<=10:
while True:
print("The car will start parking at: ",env.now)
parking_timeout = 5
yield env.timeout(parking_timeout)
print("The car will start driving at: ",env.now)
driving_timeout = 2
yield env.timeout(driving_timeout)
# if i == 10:
# print("the car is done moving")
# yield env.timeout(1)
# i += 1
env = simpy.Environment()
env.process(car(env))  # register the car generator as a SimPy process
#env.run()
env.run(until=20)
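
# Added sketch (not part of the original record): the comment block above
# claims that processes waiting on the same event resume in the order they
# yielded it. A minimal illustration, assuming SimPy's standard
# Environment/event API:
import simpy

def waiter(env, name, event):
    yield event  # suspend until the shared event fires
    print(name, "resumed at", env.now)

def trigger(env, event):
    yield env.timeout(3)
    event.succeed()  # trigger the event; waiters resume in yield order

demo_env = simpy.Environment()
shared = demo_env.event()
demo_env.process(waiter(demo_env, "first", shared))   # yielded first -> resumed first
demo_env.process(waiter(demo_env, "second", shared))
demo_env.process(trigger(demo_env, shared))
demo_env.run()  # prints "first resumed at 3" then "second resumed at 3"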
|
flexible
|
{
"blob_id": "892eb8d1802b01c035993232cc80c710211ab102",
"index": 802,
"step-1": "<mask token>\n\n\ndef car(env):\n while True:\n print('The car will start parking at: ', env.now)\n parking_timeout = 5\n yield env.timeout(parking_timeout)\n print('The car will start driving at: ', env.now)\n driving_timeout = 2\n yield env.timeout(driving_timeout)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef car(env):\n while True:\n print('The car will start parking at: ', env.now)\n parking_timeout = 5\n yield env.timeout(parking_timeout)\n print('The car will start driving at: ', env.now)\n driving_timeout = 2\n yield env.timeout(driving_timeout)\n\n\n<mask token>\nenv.process(car(env))\nenv.run(until=20)\n",
"step-3": "<mask token>\n\n\ndef car(env):\n while True:\n print('The car will start parking at: ', env.now)\n parking_timeout = 5\n yield env.timeout(parking_timeout)\n print('The car will start driving at: ', env.now)\n driving_timeout = 2\n yield env.timeout(driving_timeout)\n\n\nenv = simpy.Environment()\nenv.process(car(env))\nenv.run(until=20)\n",
"step-4": "import simpy\n\n\ndef car(env):\n while True:\n print('The car will start parking at: ', env.now)\n parking_timeout = 5\n yield env.timeout(parking_timeout)\n print('The car will start driving at: ', env.now)\n driving_timeout = 2\n yield env.timeout(driving_timeout)\n\n\nenv = simpy.Environment()\nenv.process(car(env))\nenv.run(until=20)\n",
"step-5": "#processes are described by generator functions\n#during the lifetime of a process, the process function(generator function) \n#creates events and yields them\n\n#when a process yields an event, it gets suspended\n#Simpy resumes the process when the event is triggered\n#multiple processes waiting on the same event is resumed in the same order\n#it yielded the event\n\nimport simpy\n\ndef car(env):\n # i = 0\n # while i<=10:\n while True:\n print(\"The car will start parking at: \",env.now)\n parking_timeout = 5\n yield env.timeout(parking_timeout)\n\n print(\"The car will start driving at: \",env.now)\n driving_timeout = 2\n yield env.timeout(driving_timeout)\n\n # if i == 10:\n # print(\"the car is done moving\")\n # yield env.timeout(1)\n # i += 1\n\n\nenv = simpy.Environment()\nenv.process(car(env)) #the generator function creates the process called car\n#env.run()\nenv.run(until=20)\n\n\n ",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(decoded_predictions)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
model = ResNet50(weights='imagenet', include_top=True)
img_input = image.load_img('my_picture.jpg', target_size=(224, 224))
img_input = image.img_to_array(img_input)
img_input = preprocess_input(img_input[np.newaxis, ...])
preds = model.predict(img_input)
decoded_predictions = decode_predictions(preds, top=10)[0]
print(decoded_predictions)
<|reserved_special_token_1|>
from tensorflow.keras.applications.resnet50 import ResNet50
from tensorflow.keras.preprocessing import image
from tensorflow.keras.applications.resnet50 import preprocess_input, decode_predictions
import numpy as np
model = ResNet50(weights='imagenet', include_top=True)
img_input = image.load_img('my_picture.jpg', target_size=(224, 224))
img_input = image.img_to_array(img_input)
img_input = preprocess_input(img_input[np.newaxis, ...])
preds = model.predict(img_input)
decoded_predictions = decode_predictions(preds, top=10)[0]
print(decoded_predictions)
<|reserved_special_token_1|>
from tensorflow.keras.applications.resnet50 import ResNet50
from tensorflow.keras.preprocessing import image
from tensorflow.keras.applications.resnet50 import preprocess_input, decode_predictions
import numpy as np
model = ResNet50(weights='imagenet', # Learned weights on imagenet
include_top=True)
img_input = image.load_img('my_picture.jpg', target_size=(224, 224))
img_input = image.img_to_array(img_input)
img_input = preprocess_input(img_input[np.newaxis, ...])
preds = model.predict(img_input)
decoded_predictions = decode_predictions(preds, top=10)[0]
print(decoded_predictions)
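# Added note: decode_predictions returns, for each input image, a list of
# (wordnet_id, class_name, probability) tuples. A hypothetical result for
# some my_picture.jpg might look like:
# [('n02504458', 'African_elephant', 0.82), ('n01871265', 'tusker', 0.12), ...]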
|
flexible
|
{
"blob_id": "1af6e66c19078a9ee971f608daa93247911d8406",
"index": 5881,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(decoded_predictions)\n",
"step-3": "<mask token>\nmodel = ResNet50(weights='imagenet', include_top=True)\nimg_input = image.load_img('my_picture.jpg', target_size=(224, 224))\nimg_input = image.img_to_array(img_input)\nimg_input = preprocess_input(img_input[np.newaxis, ...])\npreds = model.predict(img_input)\ndecoded_predictions = decode_predictions(preds, top=10)[0]\nprint(decoded_predictions)\n",
"step-4": "from tensorflow.keras.applications.resnet50 import ResNet50\nfrom tensorflow.keras.preprocessing import image\nfrom tensorflow.keras.applications.resnet50 import preprocess_input, decode_predictions\nimport numpy as np\nmodel = ResNet50(weights='imagenet', include_top=True)\nimg_input = image.load_img('my_picture.jpg', target_size=(224, 224))\nimg_input = image.img_to_array(img_input)\nimg_input = preprocess_input(img_input[np.newaxis, ...])\npreds = model.predict(img_input)\ndecoded_predictions = decode_predictions(preds, top=10)[0]\nprint(decoded_predictions)\n",
"step-5": "from tensorflow.keras.applications.resnet50 import ResNet50\nfrom tensorflow.keras.preprocessing import image\nfrom tensorflow.keras.applications.resnet50 import preprocess_input, decode_predictions\nimport numpy as np\n\nmodel = ResNet50(weights='imagenet', # Learned weights on imagenet\n include_top=True)\n\nimg_input = image.load_img('my_picture.jpg', target_size=(224, 224))\nimg_input = image.img_to_array(img_input)\nimg_input = preprocess_input(img_input[np.newaxis, ...])\n\npreds = model.predict(img_input)\ndecoded_predictions = decode_predictions(preds, top=10)[0]\n\nprint(decoded_predictions)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/python
# -*- coding: UTF-8 -*-
# Function description
def sum(arg1, arg2):
    # Return the sum of the two arguments.
    total = arg1 + arg2
    print "Inside the function: ", total
    return total
# Call the sum function
total = sum(10, 20)
def nop():
    pass
a = nop()
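# Added note: nop() has no return statement, so it implicitly returns None.
print a is None  # prints True (Python 2 print statement, as in the file above)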
|
normal
|
{
"blob_id": "9761070a75b043f6cc9e6134e09810b215ccd0c0",
"index": 6430,
"step-1": "#!/usr/bin/python\n# -*- coding: UTF-8 -*-\n\n# 可写函数说明\ndef sum(arg1, arg2):\n # 返回2个参数的和.\"\n total = arg1 + arg2\n print \"函数内 : \", total\n return total;\n\n\n# 调用sum函数\ntotal = sum(10, 20);\n\ndef nop():\n pass\n\na = nop();",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
something1
x = session.query(x).filter(y).count()
something2
y = session.query(
models.User, models.X,
).filter(
models.User.time > start_time,
models.User.id == user_id,
).count()
def something3():
x = session.query(
models.Review,
).filter(
models.Review.time < end_time,
).count()
something4
x = session.query(x, y).filter(bla).count()
x = session.query(x.X, y).filter(y > user_id).count()
x = session.query(
x.X, y.Y
).filter(x.X == 5).count()
something5
|
normal
|
{
"blob_id": "5b91b7025b0e574d45f95a0585128018d83c17ea",
"index": 563,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef something3():\n x = session.query(models.Review).filter(models.Review.time < end_time\n ).count()\n\n\n<mask token>\n",
"step-3": "something1\n<mask token>\nsomething2\n<mask token>\n\n\ndef something3():\n x = session.query(models.Review).filter(models.Review.time < end_time\n ).count()\n\n\nsomething4\n<mask token>\nsomething5\n",
"step-4": "something1\nx = session.query(x).filter(y).count()\nsomething2\ny = session.query(models.User, models.X).filter(models.User.time >\n start_time, models.User.id == user_id).count()\n\n\ndef something3():\n x = session.query(models.Review).filter(models.Review.time < end_time\n ).count()\n\n\nsomething4\nx = session.query(x, y).filter(bla).count()\nx = session.query(x.X, y).filter(y > user_id).count()\nx = session.query(x.X, y.Y).filter(x.X == 5).count()\nsomething5\n",
"step-5": "something1\nx = session.query(x).filter(y).count()\nsomething2\ny = session.query(\n models.User, models.X,\n).filter(\n models.User.time > start_time,\n models.User.id == user_id,\n).count()\ndef something3():\n x = session.query(\n models.Review,\n ).filter(\n models.Review.time < end_time,\n ).count()\nsomething4\nx = session.query(x, y).filter(bla).count()\nx = session.query(x.X, y).filter(y > user_id).count()\nx = session.query(\n x.X, y.Y\n).filter(x.X == 5).count()\nsomething5\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class Game:
def __init__(self):
pygame.init()
global CLOCK, SURFACE
CLOCK = pygame.time.Clock()
SURFACE = pygame.display.set_mode((WINDOW_WIDTH, WINDOW_HEIGHT))
self.mouse_x = 0
self.mouse_y = 0
pygame.display.set_caption('Minesweeper by Alyssa Moore 2017')
self.board = self.get_board()
self.revealed_cells = self.generate_data(False)
self.flags = self.generate_data(False)
self.questionmarks = self.generate_data(False)
self.game_over = False
self.timer = Stopwatch()
SURFACE.fill(BG_COLOR)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@staticmethod
def generate_data(val):
clear = []
for i in range(GRID_WIDTH):
clear.append([val] * GRID_HEIGHT)
return clear
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def draw_board(self, board, revealed, flags, questionmarks):
for cell_x in range(GRID_WIDTH):
for cell_y in range(GRID_HEIGHT):
left, top = self.get_top_left_coordinates(cell_x, cell_y)
if not revealed[cell_x][cell_y]:
pygame.draw.rect(SURFACE, CELL_COLOR, (left, top,
CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
if flags[cell_x][cell_y]:
half = int(CELL_SIDE_LENGTH * 0.5)
pygame.draw.polygon(SURFACE, FLAG_COLOR, [(half +
left, top), (left, top + CELL_SIDE_LENGTH -
CELL_MARGIN / 2), (left + CELL_SIDE_LENGTH -
CELL_MARGIN / 2, top + CELL_SIDE_LENGTH -
CELL_MARGIN / 2)])
elif questionmarks[cell_x][cell_y]:
quarter = int(CELL_SIDE_LENGTH * 0.25)
pygame.draw.rect(SURFACE, GRAY, (left, top,
CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
fontsize = int(CELL_SIDE_LENGTH)
font = pygame.font.SysFont('times new roman', fontsize)
label = font.render('?', 1, BLACK)
SURFACE.blit(label, (left + quarter, top))
else:
shape, color = self.get_shape_and_color(board, cell_x,
cell_y)
self.draw_icon(shape, color, cell_x, cell_y)
<|reserved_special_token_0|>
@staticmethod
def get_shape_and_color(board, cell_x, cell_y):
return board[cell_x][cell_y][0], board[cell_x][cell_y][1]
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class Stopwatch:
def __init__(self):
self.seconds = 0
self.running = False
self.latest_time = None
def start(self):
if not self.running:
self.running = True
self.latest_time = time.time()
def get_seconds(self):
t1 = self.seconds
if self.running:
t1 += time.time() - self.latest_time
return int(t1)
def pause(self):
if self.running:
self.running = False
self.seconds += time.time() - self.latest_time
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Game:
def __init__(self):
pygame.init()
global CLOCK, SURFACE
CLOCK = pygame.time.Clock()
SURFACE = pygame.display.set_mode((WINDOW_WIDTH, WINDOW_HEIGHT))
self.mouse_x = 0
self.mouse_y = 0
pygame.display.set_caption('Minesweeper by Alyssa Moore 2017')
self.board = self.get_board()
self.revealed_cells = self.generate_data(False)
self.flags = self.generate_data(False)
self.questionmarks = self.generate_data(False)
self.game_over = False
self.timer = Stopwatch()
SURFACE.fill(BG_COLOR)
<|reserved_special_token_0|>
@staticmethod
def get_board():
icons = []
mines = 0
for x in range(GRID_WIDTH):
for y in range(GRID_HEIGHT):
if mines < NUM_MINES:
icons.append((MINE, RED))
mines += 1
else:
icons.append((CLEAR, WHITE))
random.shuffle(icons)
board = []
for x in range(GRID_WIDTH):
column = []
for y in range(GRID_HEIGHT):
column.append(icons[0])
del icons[0]
board.append(column)
for x in range(GRID_WIDTH):
for y in range(GRID_HEIGHT):
mines = 0
if x > 0:
if y > 0:
if board[x - 1][y - 1][0] == MINE:
mines += 1
if board[x - 1][y][0] == MINE:
mines += 1
if y < GRID_HEIGHT - 1:
if board[x - 1][y + 1][0] == MINE:
mines += 1
if x < GRID_WIDTH - 1:
if y > 0:
if board[x + 1][y - 1][0] == MINE:
mines += 1
if board[x + 1][y][0] == MINE:
mines += 1
if y < GRID_HEIGHT - 1:
if board[x + 1][y + 1][0] == MINE:
mines += 1
if y > 0:
if board[x][y - 1][0] == MINE:
mines += 1
if y < GRID_HEIGHT - 1:
if board[x][y + 1][0] == MINE:
mines += 1
if board[x][y][0] != MINE:
if mines in range(1, 9):
board[x][y] = str(mines), WHITE
return board
@staticmethod
def generate_data(val):
clear = []
for i in range(GRID_WIDTH):
clear.append([val] * GRID_HEIGHT)
return clear
@staticmethod
def get_top_left_coordinates(row, column):
left = row * (CELL_SIDE_LENGTH + CELL_MARGIN) + X_BOARD_MARGIN
top = column * (CELL_SIDE_LENGTH + CELL_MARGIN) + Y_BOARD_MARGIN
return left, top
def get_cell_at_pixel(self, x, y):
for cell_x in range(GRID_WIDTH):
for cell_y in range(GRID_HEIGHT):
left, top = self.get_top_left_coordinates(cell_x, cell_y)
cell_rect = pygame.Rect(left, top, CELL_SIDE_LENGTH,
CELL_SIDE_LENGTH)
if cell_rect.collidepoint(x, y):
return cell_x, cell_y
return None, None
def draw_board(self, board, revealed, flags, questionmarks):
for cell_x in range(GRID_WIDTH):
for cell_y in range(GRID_HEIGHT):
left, top = self.get_top_left_coordinates(cell_x, cell_y)
if not revealed[cell_x][cell_y]:
pygame.draw.rect(SURFACE, CELL_COLOR, (left, top,
CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
if flags[cell_x][cell_y]:
half = int(CELL_SIDE_LENGTH * 0.5)
pygame.draw.polygon(SURFACE, FLAG_COLOR, [(half +
left, top), (left, top + CELL_SIDE_LENGTH -
CELL_MARGIN / 2), (left + CELL_SIDE_LENGTH -
CELL_MARGIN / 2, top + CELL_SIDE_LENGTH -
CELL_MARGIN / 2)])
elif questionmarks[cell_x][cell_y]:
quarter = int(CELL_SIDE_LENGTH * 0.25)
pygame.draw.rect(SURFACE, GRAY, (left, top,
CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
fontsize = int(CELL_SIDE_LENGTH)
font = pygame.font.SysFont('times new roman', fontsize)
label = font.render('?', 1, BLACK)
SURFACE.blit(label, (left + quarter, top))
else:
shape, color = self.get_shape_and_color(board, cell_x,
cell_y)
self.draw_icon(shape, color, cell_x, cell_y)
def draw_icon(self, shape, color, cell_x, cell_y):
quarter = int(CELL_SIDE_LENGTH * 0.25)
left, top = self.get_top_left_coordinates(cell_x, cell_y)
if shape == CLEAR:
pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH,
CELL_SIDE_LENGTH))
elif shape == MINE:
pygame.draw.ellipse(SURFACE, color, (left, top,
CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
else:
pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH,
CELL_SIDE_LENGTH))
fontsize = int(CELL_SIDE_LENGTH)
font = pygame.font.SysFont('times new roman', fontsize)
label = font.render(shape, 1, BLACK)
SURFACE.blit(label, (left + quarter, top))
@staticmethod
def get_shape_and_color(board, cell_x, cell_y):
return board[cell_x][cell_y][0], board[cell_x][cell_y][1]
<|reserved_special_token_0|>
def reveal_cells(self, x, y, board, revealed, flags, questionmarks):
if revealed[x][y]:
return
if flags[x][y]:
return
revealed[x][y] = True
if board[x][y][0] != CLEAR:
return
if x > 0:
if y > 0:
self.reveal_cells(x - 1, y - 1, board, revealed, flags,
questionmarks)
self.reveal_cells(x - 1, y, board, revealed, flags, questionmarks)
if y < GRID_HEIGHT - 1:
self.reveal_cells(x - 1, y + 1, board, revealed, flags,
questionmarks)
if x < GRID_WIDTH - 1:
if y > 0:
self.reveal_cells(x + 1, y - 1, board, revealed, flags,
questionmarks)
self.reveal_cells(x + 1, y, board, revealed, flags, questionmarks)
if y < GRID_HEIGHT - 1:
self.reveal_cells(x + 1, y + 1, board, revealed, flags,
questionmarks)
if y > 0:
self.reveal_cells(x, y - 1, board, revealed, flags, questionmarks)
if y < GRID_HEIGHT - 1:
self.reveal_cells(x, y + 1, board, revealed, flags, questionmarks)
@staticmethod
def create_menu():
font = pygame.font.SysFont('times new roman', 20)
label = font.render(' High scores', 1, BLACK)
pygame.draw.rect(SURFACE, GRAY, (500, 125, 105, 50))
SURFACE.blit(label, (500, 135))
class Stopwatch:
def __init__(self):
self.seconds = 0
self.running = False
self.latest_time = None
def start(self):
if not self.running:
self.running = True
self.latest_time = time.time()
def get_seconds(self):
t1 = self.seconds
if self.running:
t1 += time.time() - self.latest_time
return int(t1)
def pause(self):
if self.running:
self.running = False
self.seconds += time.time() - self.latest_time
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Game:
def __init__(self):
pygame.init()
global CLOCK, SURFACE
CLOCK = pygame.time.Clock()
SURFACE = pygame.display.set_mode((WINDOW_WIDTH, WINDOW_HEIGHT))
self.mouse_x = 0
self.mouse_y = 0
pygame.display.set_caption('Minesweeper by Alyssa Moore 2017')
self.board = self.get_board()
self.revealed_cells = self.generate_data(False)
self.flags = self.generate_data(False)
self.questionmarks = self.generate_data(False)
self.game_over = False
self.timer = Stopwatch()
SURFACE.fill(BG_COLOR)
def main(self):
while True:
left_click = False
right_click = False
SURFACE.fill(BG_COLOR)
self.draw_board(self.board, self.revealed_cells, self.flags,
self.questionmarks)
self.create_menu()
font = pygame.font.SysFont('times new roman', 25)
self.timer.start()
t1 = self.timer.get_seconds()
label = font.render(str(t1), 1, MAGENTA)
SURFACE.blit(label, (50, 50))
for event in pygame.event.get():
if event.type == pygame.locals.QUIT:
pygame.quit()
sys.exit()
elif event.type == pygame.locals.MOUSEMOTION:
self.mouse_x, self.mouse_y = event.pos
elif event.type == pygame.locals.MOUSEBUTTONDOWN and event.button == 1:
self.mouse_x, self.mouse_y = event.pos
print(self.mouse_x, self.mouse_y)
left_click = True
elif event.type == pygame.locals.MOUSEBUTTONDOWN and event.button == 3:
self.mouse_x, self.mouse_y = event.pos
right_click = True
if self.game_over and right_click:
self.board = self.get_board()
self.revealed_cells = self.generate_data(False)
self.flags = self.generate_data(False)
self.questionmarks = self.generate_data(False)
self.game_over = False
self.timer = Stopwatch()
right_click = False
if self.game_over:
self.timer.pause()
score = self.timer.get_seconds()
a_x = X_BOARD_MARGIN + GRID_WIDTH / 4 * CELL_SIDE_LENGTH
b_y = (Y_BOARD_MARGIN + Y_BOARD_MARGIN / 4 + GRID_HEIGHT *
CELL_SIDE_LENGTH + GRID_HEIGHT * CELL_MARGIN)
font = pygame.font.SysFont('times new roman', 25)
if win:
label = font.render('Congratulations, you won!', 1, GREEN)
SURFACE.blit(label, (a_x - 75, b_y))
label = font.render('Score: ' + str(score), 1, GREEN)
SURFACE.blit(label, (a_x + 200, b_y))
else:
label = font.render('GAME OVER', 1, RED)
SURFACE.blit(label, (a_x + 10, b_y))
label = font.render('Press RIGHT mouse button', 1, YELLOW)
SURFACE.blit(label, (a_x - 50, b_y + 25))
cell_x, cell_y = self.get_cell_at_pixel(self.mouse_x, self.mouse_y)
if cell_x is not None and cell_y is not None:
if not self.revealed_cells[cell_x][cell_y
] and not self.game_over:
self.highlight_cell(cell_x, cell_y)
if not self.revealed_cells[cell_x][cell_y
] and left_click and not self.game_over:
if not self.flags[cell_x][cell_y
] and not self.questionmarks[cell_x][cell_y]:
self.flags[cell_x][cell_y] = False
if self.board[cell_x][cell_y][0] == MINE:
self.revealed_cells = self.generate_data(True)
self.game_over = True
elif self.board[cell_x][cell_y][0] == CLEAR:
self.reveal_cells(cell_x, cell_y, self.board,
self.revealed_cells, self.flags, self.
questionmarks)
else:
self.revealed_cells[cell_x][cell_y] = True
self.draw_board(self.board, self.revealed_cells,
self.flags, self.questionmarks)
if not self.revealed_cells[cell_x][cell_y
] and right_click and not self.game_over:
if self.flags[cell_x][cell_y]:
self.flags[cell_x][cell_y] = False
self.questionmarks[cell_x][cell_y] = True
elif self.questionmarks[cell_x][cell_y]:
self.questionmarks[cell_x][cell_y] = False
self.flags[cell_x][cell_y] = False
else:
self.flags[cell_x][cell_y] = True
self.questionmarks[cell_x][cell_y] = False
self.draw_board(self.board, self.revealed_cells, self.
flags, self.questionmarks)
win = True
for x in range(GRID_WIDTH):
for y in range(GRID_HEIGHT):
if self.board[x][y][0] == MINE and not self.flags[x][y
] or self.board[x][y][0
] != MINE and not self.revealed_cells[x][y]:
win = False
if win:
self.game_over = True
pygame.display.update()
CLOCK.tick(FPS)
@staticmethod
def get_board():
icons = []
mines = 0
for x in range(GRID_WIDTH):
for y in range(GRID_HEIGHT):
if mines < NUM_MINES:
icons.append((MINE, RED))
mines += 1
else:
icons.append((CLEAR, WHITE))
random.shuffle(icons)
board = []
for x in range(GRID_WIDTH):
column = []
for y in range(GRID_HEIGHT):
column.append(icons[0])
del icons[0]
board.append(column)
for x in range(GRID_WIDTH):
for y in range(GRID_HEIGHT):
mines = 0
if x > 0:
if y > 0:
if board[x - 1][y - 1][0] == MINE:
mines += 1
if board[x - 1][y][0] == MINE:
mines += 1
if y < GRID_HEIGHT - 1:
if board[x - 1][y + 1][0] == MINE:
mines += 1
if x < GRID_WIDTH - 1:
if y > 0:
if board[x + 1][y - 1][0] == MINE:
mines += 1
if board[x + 1][y][0] == MINE:
mines += 1
if y < GRID_HEIGHT - 1:
if board[x + 1][y + 1][0] == MINE:
mines += 1
if y > 0:
if board[x][y - 1][0] == MINE:
mines += 1
if y < GRID_HEIGHT - 1:
if board[x][y + 1][0] == MINE:
mines += 1
if board[x][y][0] != MINE:
if mines in range(1, 9):
board[x][y] = str(mines), WHITE
return board
@staticmethod
def generate_data(val):
clear = []
for i in range(GRID_WIDTH):
clear.append([val] * GRID_HEIGHT)
return clear
@staticmethod
def get_top_left_coordinates(row, column):
left = row * (CELL_SIDE_LENGTH + CELL_MARGIN) + X_BOARD_MARGIN
top = column * (CELL_SIDE_LENGTH + CELL_MARGIN) + Y_BOARD_MARGIN
return left, top
def get_cell_at_pixel(self, x, y):
for cell_x in range(GRID_WIDTH):
for cell_y in range(GRID_HEIGHT):
left, top = self.get_top_left_coordinates(cell_x, cell_y)
cell_rect = pygame.Rect(left, top, CELL_SIDE_LENGTH,
CELL_SIDE_LENGTH)
if cell_rect.collidepoint(x, y):
return cell_x, cell_y
return None, None
def draw_board(self, board, revealed, flags, questionmarks):
for cell_x in range(GRID_WIDTH):
for cell_y in range(GRID_HEIGHT):
left, top = self.get_top_left_coordinates(cell_x, cell_y)
if not revealed[cell_x][cell_y]:
pygame.draw.rect(SURFACE, CELL_COLOR, (left, top,
CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
if flags[cell_x][cell_y]:
half = int(CELL_SIDE_LENGTH * 0.5)
pygame.draw.polygon(SURFACE, FLAG_COLOR, [(half +
left, top), (left, top + CELL_SIDE_LENGTH -
CELL_MARGIN / 2), (left + CELL_SIDE_LENGTH -
CELL_MARGIN / 2, top + CELL_SIDE_LENGTH -
CELL_MARGIN / 2)])
elif questionmarks[cell_x][cell_y]:
quarter = int(CELL_SIDE_LENGTH * 0.25)
pygame.draw.rect(SURFACE, GRAY, (left, top,
CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
fontsize = int(CELL_SIDE_LENGTH)
font = pygame.font.SysFont('times new roman', fontsize)
label = font.render('?', 1, BLACK)
SURFACE.blit(label, (left + quarter, top))
else:
shape, color = self.get_shape_and_color(board, cell_x,
cell_y)
self.draw_icon(shape, color, cell_x, cell_y)
def draw_icon(self, shape, color, cell_x, cell_y):
quarter = int(CELL_SIDE_LENGTH * 0.25)
left, top = self.get_top_left_coordinates(cell_x, cell_y)
if shape == CLEAR:
pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH,
CELL_SIDE_LENGTH))
elif shape == MINE:
pygame.draw.ellipse(SURFACE, color, (left, top,
CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
else:
pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH,
CELL_SIDE_LENGTH))
fontsize = int(CELL_SIDE_LENGTH)
font = pygame.font.SysFont('times new roman', fontsize)
label = font.render(shape, 1, BLACK)
SURFACE.blit(label, (left + quarter, top))
@staticmethod
def get_shape_and_color(board, cell_x, cell_y):
return board[cell_x][cell_y][0], board[cell_x][cell_y][1]
<|reserved_special_token_0|>
def reveal_cells(self, x, y, board, revealed, flags, questionmarks):
if revealed[x][y]:
return
if flags[x][y]:
return
revealed[x][y] = True
if board[x][y][0] != CLEAR:
return
if x > 0:
if y > 0:
self.reveal_cells(x - 1, y - 1, board, revealed, flags,
questionmarks)
self.reveal_cells(x - 1, y, board, revealed, flags, questionmarks)
if y < GRID_HEIGHT - 1:
self.reveal_cells(x - 1, y + 1, board, revealed, flags,
questionmarks)
if x < GRID_WIDTH - 1:
if y > 0:
self.reveal_cells(x + 1, y - 1, board, revealed, flags,
questionmarks)
self.reveal_cells(x + 1, y, board, revealed, flags, questionmarks)
if y < GRID_HEIGHT - 1:
self.reveal_cells(x + 1, y + 1, board, revealed, flags,
questionmarks)
if y > 0:
self.reveal_cells(x, y - 1, board, revealed, flags, questionmarks)
if y < GRID_HEIGHT - 1:
self.reveal_cells(x, y + 1, board, revealed, flags, questionmarks)
@staticmethod
def create_menu():
font = pygame.font.SysFont('times new roman', 20)
label = font.render(' High scores', 1, BLACK)
pygame.draw.rect(SURFACE, GRAY, (500, 125, 105, 50))
SURFACE.blit(label, (500, 135))
class Stopwatch:
def __init__(self):
self.seconds = 0
self.running = False
self.latest_time = None
def start(self):
if not self.running:
self.running = True
self.latest_time = time.time()
def get_seconds(self):
t1 = self.seconds
if self.running:
t1 += time.time() - self.latest_time
return int(t1)
def pause(self):
if self.running:
self.running = False
self.seconds += time.time() - self.latest_time
<|reserved_special_token_0|>
<|reserved_special_token_1|>
import sys
import random
import pygame
import pygame.locals
import time
CELL_SIDE_LENGTH = 40
CELL_MARGIN = 2
GRID_HEIGHT = 10
GRID_WIDTH = 10
X_BOARD_MARGIN = 50
Y_BOARD_MARGIN = 75
MENU_MARGIN = 100
DIFFICULTY = 0.1
FPS = 30
NUM_MINES = 1 + int(GRID_WIDTH * GRID_HEIGHT * DIFFICULTY)
WINDOW_HEIGHT = (CELL_SIDE_LENGTH * GRID_HEIGHT + CELL_MARGIN * GRID_HEIGHT +
Y_BOARD_MARGIN * 2)
WINDOW_WIDTH = (CELL_SIDE_LENGTH * GRID_WIDTH + CELL_MARGIN * GRID_WIDTH +
X_BOARD_MARGIN * 2 + MENU_MARGIN)
RED = 255, 0, 0
YELLOW = 255, 255, 0
GREEN = 0, 255, 0
MIDGREEN = 40, 190, 40
CYAN = 0, 255, 255
BLUE = 0, 0, 255
DARKBLUE = 20, 20, 60
MAGENTA = 255, 0, 255
BLACK = 0, 0, 0
WHITE = 255, 255, 255
GRAY = 200, 200, 200
BG_COLOR = DARKBLUE
CELL_COLOR = GRAY
HIGHLIGHT_COLOR = CYAN
FLAG_COLOR = MIDGREEN
FLAG = 'flag'
MINE = 'mine'
CLEAR = 'clear'
class Game:
def __init__(self):
pygame.init()
global CLOCK, SURFACE
CLOCK = pygame.time.Clock()
SURFACE = pygame.display.set_mode((WINDOW_WIDTH, WINDOW_HEIGHT))
self.mouse_x = 0
self.mouse_y = 0
pygame.display.set_caption('Minesweeper by Alyssa Moore 2017')
self.board = self.get_board()
self.revealed_cells = self.generate_data(False)
self.flags = self.generate_data(False)
self.questionmarks = self.generate_data(False)
self.game_over = False
self.timer = Stopwatch()
SURFACE.fill(BG_COLOR)
def main(self):
while True:
left_click = False
right_click = False
SURFACE.fill(BG_COLOR)
self.draw_board(self.board, self.revealed_cells, self.flags,
self.questionmarks)
self.create_menu()
font = pygame.font.SysFont('times new roman', 25)
self.timer.start()
t1 = self.timer.get_seconds()
label = font.render(str(t1), 1, MAGENTA)
SURFACE.blit(label, (50, 50))
for event in pygame.event.get():
if event.type == pygame.locals.QUIT:
pygame.quit()
sys.exit()
elif event.type == pygame.locals.MOUSEMOTION:
self.mouse_x, self.mouse_y = event.pos
elif event.type == pygame.locals.MOUSEBUTTONDOWN and event.button == 1:
self.mouse_x, self.mouse_y = event.pos
print(self.mouse_x, self.mouse_y)
left_click = True
elif event.type == pygame.locals.MOUSEBUTTONDOWN and event.button == 3:
self.mouse_x, self.mouse_y = event.pos
right_click = True
if self.game_over and right_click:
self.board = self.get_board()
self.revealed_cells = self.generate_data(False)
self.flags = self.generate_data(False)
self.questionmarks = self.generate_data(False)
self.game_over = False
self.timer = Stopwatch()
right_click = False
if self.game_over:
self.timer.pause()
score = self.timer.get_seconds()
a_x = X_BOARD_MARGIN + GRID_WIDTH / 4 * CELL_SIDE_LENGTH
b_y = (Y_BOARD_MARGIN + Y_BOARD_MARGIN / 4 + GRID_HEIGHT *
CELL_SIDE_LENGTH + GRID_HEIGHT * CELL_MARGIN)
font = pygame.font.SysFont('times new roman', 25)
if win:
label = font.render('Congratulations, you won!', 1, GREEN)
SURFACE.blit(label, (a_x - 75, b_y))
label = font.render('Score: ' + str(score), 1, GREEN)
SURFACE.blit(label, (a_x + 200, b_y))
else:
label = font.render('GAME OVER', 1, RED)
SURFACE.blit(label, (a_x + 10, b_y))
label = font.render('Press RIGHT mouse button', 1, YELLOW)
SURFACE.blit(label, (a_x - 50, b_y + 25))
cell_x, cell_y = self.get_cell_at_pixel(self.mouse_x, self.mouse_y)
if cell_x is not None and cell_y is not None:
if not self.revealed_cells[cell_x][cell_y
] and not self.game_over:
self.highlight_cell(cell_x, cell_y)
if not self.revealed_cells[cell_x][cell_y
] and left_click and not self.game_over:
if not self.flags[cell_x][cell_y
] and not self.questionmarks[cell_x][cell_y]:
self.flags[cell_x][cell_y] = False
if self.board[cell_x][cell_y][0] == MINE:
self.revealed_cells = self.generate_data(True)
self.game_over = True
elif self.board[cell_x][cell_y][0] == CLEAR:
self.reveal_cells(cell_x, cell_y, self.board,
self.revealed_cells, self.flags, self.
questionmarks)
else:
self.revealed_cells[cell_x][cell_y] = True
self.draw_board(self.board, self.revealed_cells,
self.flags, self.questionmarks)
if not self.revealed_cells[cell_x][cell_y
] and right_click and not self.game_over:
if self.flags[cell_x][cell_y]:
self.flags[cell_x][cell_y] = False
self.questionmarks[cell_x][cell_y] = True
elif self.questionmarks[cell_x][cell_y]:
self.questionmarks[cell_x][cell_y] = False
self.flags[cell_x][cell_y] = False
else:
self.flags[cell_x][cell_y] = True
self.questionmarks[cell_x][cell_y] = False
self.draw_board(self.board, self.revealed_cells, self.
flags, self.questionmarks)
win = True
for x in range(GRID_WIDTH):
for y in range(GRID_HEIGHT):
if self.board[x][y][0] == MINE and not self.flags[x][y
] or self.board[x][y][0
] != MINE and not self.revealed_cells[x][y]:
win = False
if win:
self.game_over = True
pygame.display.update()
CLOCK.tick(FPS)
@staticmethod
def get_board():
icons = []
mines = 0
for x in range(GRID_WIDTH):
for y in range(GRID_HEIGHT):
if mines < NUM_MINES:
icons.append((MINE, RED))
mines += 1
else:
icons.append((CLEAR, WHITE))
random.shuffle(icons)
board = []
for x in range(GRID_WIDTH):
column = []
for y in range(GRID_HEIGHT):
column.append(icons[0])
del icons[0]
board.append(column)
for x in range(GRID_WIDTH):
for y in range(GRID_HEIGHT):
mines = 0
if x > 0:
if y > 0:
if board[x - 1][y - 1][0] == MINE:
mines += 1
if board[x - 1][y][0] == MINE:
mines += 1
if y < GRID_HEIGHT - 1:
if board[x - 1][y + 1][0] == MINE:
mines += 1
if x < GRID_WIDTH - 1:
if y > 0:
if board[x + 1][y - 1][0] == MINE:
mines += 1
if board[x + 1][y][0] == MINE:
mines += 1
if y < GRID_HEIGHT - 1:
if board[x + 1][y + 1][0] == MINE:
mines += 1
if y > 0:
if board[x][y - 1][0] == MINE:
mines += 1
if y < GRID_HEIGHT - 1:
if board[x][y + 1][0] == MINE:
mines += 1
if board[x][y][0] != MINE:
if mines in range(1, 9):
board[x][y] = str(mines), WHITE
return board
@staticmethod
def generate_data(val):
clear = []
for i in range(GRID_WIDTH):
clear.append([val] * GRID_HEIGHT)
return clear
@staticmethod
def get_top_left_coordinates(row, column):
left = row * (CELL_SIDE_LENGTH + CELL_MARGIN) + X_BOARD_MARGIN
top = column * (CELL_SIDE_LENGTH + CELL_MARGIN) + Y_BOARD_MARGIN
return left, top
def get_cell_at_pixel(self, x, y):
for cell_x in range(GRID_WIDTH):
for cell_y in range(GRID_HEIGHT):
left, top = self.get_top_left_coordinates(cell_x, cell_y)
cell_rect = pygame.Rect(left, top, CELL_SIDE_LENGTH,
CELL_SIDE_LENGTH)
if cell_rect.collidepoint(x, y):
return cell_x, cell_y
return None, None
def draw_board(self, board, revealed, flags, questionmarks):
for cell_x in range(GRID_WIDTH):
for cell_y in range(GRID_HEIGHT):
left, top = self.get_top_left_coordinates(cell_x, cell_y)
if not revealed[cell_x][cell_y]:
pygame.draw.rect(SURFACE, CELL_COLOR, (left, top,
CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
if flags[cell_x][cell_y]:
half = int(CELL_SIDE_LENGTH * 0.5)
pygame.draw.polygon(SURFACE, FLAG_COLOR, [(half +
left, top), (left, top + CELL_SIDE_LENGTH -
CELL_MARGIN / 2), (left + CELL_SIDE_LENGTH -
CELL_MARGIN / 2, top + CELL_SIDE_LENGTH -
CELL_MARGIN / 2)])
elif questionmarks[cell_x][cell_y]:
quarter = int(CELL_SIDE_LENGTH * 0.25)
pygame.draw.rect(SURFACE, GRAY, (left, top,
CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
fontsize = int(CELL_SIDE_LENGTH)
font = pygame.font.SysFont('times new roman', fontsize)
label = font.render('?', 1, BLACK)
SURFACE.blit(label, (left + quarter, top))
else:
shape, color = self.get_shape_and_color(board, cell_x,
cell_y)
self.draw_icon(shape, color, cell_x, cell_y)
def draw_icon(self, shape, color, cell_x, cell_y):
quarter = int(CELL_SIDE_LENGTH * 0.25)
left, top = self.get_top_left_coordinates(cell_x, cell_y)
if shape == CLEAR:
pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH,
CELL_SIDE_LENGTH))
elif shape == MINE:
pygame.draw.ellipse(SURFACE, color, (left, top,
CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
else:
pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH,
CELL_SIDE_LENGTH))
fontsize = int(CELL_SIDE_LENGTH)
font = pygame.font.SysFont('times new roman', fontsize)
label = font.render(shape, 1, BLACK)
SURFACE.blit(label, (left + quarter, top))
@staticmethod
def get_shape_and_color(board, cell_x, cell_y):
return board[cell_x][cell_y][0], board[cell_x][cell_y][1]
def highlight_cell(self, cell_x, cell_y):
left, top = self.get_top_left_coordinates(cell_x, cell_y)
pygame.draw.rect(SURFACE, HIGHLIGHT_COLOR, (left - CELL_MARGIN / 2,
top - CELL_MARGIN / 2, CELL_SIDE_LENGTH + CELL_MARGIN,
CELL_SIDE_LENGTH + CELL_MARGIN), 2)
def reveal_cells(self, x, y, board, revealed, flags, questionmarks):
if revealed[x][y]:
return
if flags[x][y]:
return
revealed[x][y] = True
if board[x][y][0] != CLEAR:
return
if x > 0:
if y > 0:
self.reveal_cells(x - 1, y - 1, board, revealed, flags,
questionmarks)
self.reveal_cells(x - 1, y, board, revealed, flags, questionmarks)
if y < GRID_HEIGHT - 1:
self.reveal_cells(x - 1, y + 1, board, revealed, flags,
questionmarks)
if x < GRID_WIDTH - 1:
if y > 0:
self.reveal_cells(x + 1, y - 1, board, revealed, flags,
questionmarks)
self.reveal_cells(x + 1, y, board, revealed, flags, questionmarks)
if y < GRID_HEIGHT - 1:
self.reveal_cells(x + 1, y + 1, board, revealed, flags,
questionmarks)
if y > 0:
self.reveal_cells(x, y - 1, board, revealed, flags, questionmarks)
if y < GRID_HEIGHT - 1:
self.reveal_cells(x, y + 1, board, revealed, flags, questionmarks)
@staticmethod
def create_menu():
font = pygame.font.SysFont('times new roman', 20)
label = font.render(' High scores', 1, BLACK)
pygame.draw.rect(SURFACE, GRAY, (500, 125, 105, 50))
SURFACE.blit(label, (500, 135))
class Stopwatch:
def __init__(self):
self.seconds = 0
self.running = False
self.latest_time = None
def start(self):
if not self.running:
self.running = True
self.latest_time = time.time()
def get_seconds(self):
t1 = self.seconds
if self.running:
t1 += time.time() - self.latest_time
return int(t1)
def pause(self):
if self.running:
self.running = False
self.seconds += time.time() - self.latest_time
g = Game()
g.main()
<|reserved_special_token_1|>
import sys
import random
import pygame
import pygame.locals
import time
# TODO high scores, difficulties
# Absolutes (in pixels where not otherwise stated)
CELL_SIDE_LENGTH = 40 # Side length of each cell
CELL_MARGIN = 2 # Gap between cells
GRID_HEIGHT = 10 # How many cells are in the grid
GRID_WIDTH = 10
X_BOARD_MARGIN = 50 # Gap between grid and sides of board
Y_BOARD_MARGIN = 75
MENU_MARGIN = 100 # Amount of space on the right dedicated to the menu
DIFFICULTY = 0.1 # Ratio of bombs (10% by default)
FPS = 30 # frames per second (window refresh speed)
# Relatives (so board size can easily be changed)
NUM_MINES = 1 + int(GRID_WIDTH * GRID_HEIGHT * DIFFICULTY) # Default about 10% of the board is mines
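# (Added) With the 10x10 default grid this is 1 + int(100 * 0.1) = 11 mines,
# so "about 10%" above actually means 10% plus one extra mine.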
WINDOW_HEIGHT = (CELL_SIDE_LENGTH * GRID_HEIGHT) + (CELL_MARGIN * GRID_HEIGHT) + (Y_BOARD_MARGIN * 2)
WINDOW_WIDTH = (CELL_SIDE_LENGTH * GRID_WIDTH) + (CELL_MARGIN * GRID_WIDTH) + (X_BOARD_MARGIN * 2) + MENU_MARGIN
# R G B (not all used, but kept so theme can easily be changed)
RED = (255, 0, 0)
YELLOW = (255, 255, 0)
GREEN = (0, 255, 0)
MIDGREEN = (40, 190, 40)
CYAN = (0, 255, 255)
BLUE = (0, 0, 255)
DARKBLUE = (20, 20, 60)
MAGENTA = (255, 0, 255)
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
GRAY = (200, 200, 200)
BG_COLOR = DARKBLUE # Background color
CELL_COLOR = GRAY # Universal cover color
HIGHLIGHT_COLOR = CYAN # Cell the cursor is currently hovering over
FLAG_COLOR = MIDGREEN
# Symbols
FLAG = 'flag'
MINE = 'mine'
CLEAR = 'clear'
class Game:
def __init__(self):
pygame.init()
global CLOCK, SURFACE
CLOCK = pygame.time.Clock()
SURFACE = pygame.display.set_mode((WINDOW_WIDTH, WINDOW_HEIGHT))
self.mouse_x = 0 # Stores x-coordinate of mouse event
self.mouse_y = 0 # Stores y-coordinate of mouse event
pygame.display.set_caption('Minesweeper by Alyssa Moore 2017')
self.board = self.get_board()
self.revealed_cells = self.generate_data(False)
self.flags = self.generate_data(False)
self.questionmarks = self.generate_data(False)
self.game_over = False
self.timer = Stopwatch()
SURFACE.fill(BG_COLOR)
def main(self):
while True:
left_click = False
right_click = False
SURFACE.fill(BG_COLOR)
self.draw_board(self.board, self.revealed_cells, self.flags, self.questionmarks)
self.create_menu()
font = pygame.font.SysFont("times new roman", 25)
# Timer (will be used to implement high scores)
self.timer.start()
t1 = self.timer.get_seconds()
label = font.render(str(t1), 1, MAGENTA)
SURFACE.blit(label, (50, 50))
# Mouse event handling
for event in pygame.event.get():
if event.type == pygame.locals.QUIT:
pygame.quit()
sys.exit() # Even if the window closes, we still need to manually stop the processes
elif event.type == pygame.locals.MOUSEMOTION:
self.mouse_x, self.mouse_y = event.pos # For hovering info
elif event.type == pygame.locals.MOUSEBUTTONDOWN and event.button == 1: # Left click
self.mouse_x, self.mouse_y = event.pos
print(self.mouse_x, self.mouse_y)
left_click = True
elif event.type == pygame.locals.MOUSEBUTTONDOWN and event.button == 3: # Right click
self.mouse_x, self.mouse_y = event.pos
right_click = True
# If user decided to start over, reinitialize game
if self.game_over and right_click:
self.board = self.get_board()
self.revealed_cells = self.generate_data(False)
self.flags = self.generate_data(False)
self.questionmarks = self.generate_data(False)
self.game_over = False
self.timer = Stopwatch()
right_click = False
# TODO tweak spacing on text
if self.game_over:
self.timer.pause()
score = self.timer.get_seconds()
a_x = X_BOARD_MARGIN + ((GRID_WIDTH / 4) * CELL_SIDE_LENGTH)
b_y = Y_BOARD_MARGIN + (Y_BOARD_MARGIN / 4) + (GRID_HEIGHT * CELL_SIDE_LENGTH) + (GRID_HEIGHT * CELL_MARGIN)
font = pygame.font.SysFont("times new roman", 25)
if win:
label = font.render('Congratulations, you won!', 1, GREEN)
SURFACE.blit(label, (a_x - 75, b_y))
label = font.render('Score: ' + str(score), 1, GREEN)
SURFACE.blit(label, (a_x + 200, b_y))
else:
label = font.render('GAME OVER', 1, RED)
SURFACE.blit(label, (a_x + 10, b_y))
label = font.render('Press RIGHT mouse button', 1, YELLOW)
SURFACE.blit(label, (a_x - 50, b_y + 25))
cell_x, cell_y = self.get_cell_at_pixel(self.mouse_x, self.mouse_y)
if cell_x is not None and cell_y is not None: # If mouse is hovering over a cell during mouse event
# Highlight cell
if not self.revealed_cells[cell_x][cell_y] and not self.game_over:
self.highlight_cell(cell_x, cell_y)
# Digging somewhere
if not self.revealed_cells[cell_x][cell_y] and left_click and not self.game_over:
# So you can't accidentally click a flagged/question mark space
if not self.flags[cell_x][cell_y] and not self.questionmarks[cell_x][cell_y]:
self.flags[cell_x][cell_y] = False
if self.board[cell_x][cell_y][0] == MINE: # If you dig a mine, reveal all cells & game over
self.revealed_cells = self.generate_data(True)
self.game_over = True
elif self.board[cell_x][cell_y][0] == CLEAR: # If you dig a clear cell, reveal that cell
self.reveal_cells(cell_x, cell_y, self.board, self.revealed_cells, self.flags, self.questionmarks)
else:
self.revealed_cells[cell_x][cell_y] = True # Set the cell as revealed
# Redraw board after mouse event
self.draw_board(self.board, self.revealed_cells, self.flags, self.questionmarks)
# Placing a flag- if flag already there, change flag to question mark.
# If question mark already there, turn to nothing. If nothing there, turn on flag
if not self.revealed_cells[cell_x][cell_y] and right_click and not self.game_over:
if self.flags[cell_x][cell_y]:
self.flags[cell_x][cell_y] = False
self.questionmarks[cell_x][cell_y] = True
elif self.questionmarks[cell_x][cell_y]:
self.questionmarks[cell_x][cell_y] = False
self.flags[cell_x][cell_y] = False
else:
self.flags[cell_x][cell_y] = True
self.questionmarks[cell_x][cell_y] = False
# Flag is drawn in this method call
self.draw_board(self.board, self.revealed_cells, self.flags, self.questionmarks)
# This block decides whether or not the player has won yet after a mouse event
win = True
for x in range(GRID_WIDTH): # If a cell is a mine and not flagged, or if a cell is clear
for y in range(GRID_HEIGHT): # but not revealed, then the game is not yet over
if (self.board[x][y][0] == MINE and not self.flags[x][y]) or (
self.board[x][y][0] != MINE and not self.revealed_cells[x][y]):
win = False
if win:
self.game_over = True
# Redraw the screen and wait for clock tick
pygame.display.update()
CLOCK.tick(FPS)
@staticmethod
def get_board():
icons = []
mines = 0
# Bottom of board is made of only mines and clear cells, which is then selectively covered for gameplay
# Making randomized array
for x in range(GRID_WIDTH):
for y in range(GRID_HEIGHT):
if mines < NUM_MINES:
icons.append((MINE, RED))
mines += 1
else:
icons.append((CLEAR, WHITE))
random.shuffle(icons)
# Create static under-board
board = []
for x in range(GRID_WIDTH):
column = []
for y in range(GRID_HEIGHT):
column.append(icons[0])
del icons[0] # so the next icon[0] is the one after this
board.append(column)
# This block determines how many mines are around each cell, and adds the number to the board's array
for x in range(GRID_WIDTH):
for y in range(GRID_HEIGHT):
mines = 0
if x > 0:
if y > 0: # If not on the left edge AND not on top edge
if board[x - 1][y - 1][0] == MINE:
mines += 1
if board[x - 1][y][0] == MINE:
mines += 1
if y < GRID_HEIGHT - 1:
if board[x - 1][y + 1][0] == MINE:
mines += 1
if x < GRID_WIDTH - 1:
if y > 0: # If not on right edge AND not on top edge
if board[x + 1][y - 1][0] == MINE:
mines += 1
if board[x + 1][y][0] == MINE:
mines += 1
if y < GRID_HEIGHT - 1:
if board[x + 1][y + 1][0] == MINE:
mines += 1
                if y > 0:  # If not on top edge: count the cell directly above
                    if board[x][y - 1][0] == MINE:
                        mines += 1
                if y < GRID_HEIGHT - 1:  # If not on bottom edge: count the cell directly below
if board[x][y + 1][0] == MINE:
mines += 1
# If the cell is clear and there are mines around it, add the number of mines to board array
if board[x][y][0] != MINE:
if mines in range(1, 9):
board[x][y] = (str(mines), WHITE)
return board
# Used to show full board on game over & reset board on game start
@staticmethod
def generate_data(val):
clear = []
for i in range(GRID_WIDTH):
clear.append([val] * GRID_HEIGHT)
return clear
# Convert row, column coordinates into x, y pixel coordinates (for drawing shapes)
@staticmethod
def get_top_left_coordinates(row, column):
left = row * (CELL_SIDE_LENGTH + CELL_MARGIN) + X_BOARD_MARGIN
top = column * (CELL_SIDE_LENGTH + CELL_MARGIN) + Y_BOARD_MARGIN
return left, top
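        # (Added) Worked example with the defaults above (cell 40px, margin 2px,
        # x-margin 50px, y-margin 75px): cell (2, 3) maps to
        # left = 2 * 42 + 50 = 134, top = 3 * 42 + 75 = 201.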
# Convert x, y pixel coordinates to row, column coordinates (for mouse hovering)
def get_cell_at_pixel(self, x, y):
for cell_x in range(GRID_WIDTH):
for cell_y in range(GRID_HEIGHT):
left, top = self.get_top_left_coordinates(cell_x, cell_y)
cell_rect = pygame.Rect(left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH)
if cell_rect.collidepoint(x, y): # If currently hovering over a cell
return cell_x, cell_y
return None, None # If not currently hovering over a cell
# Redraws board after mouse event
def draw_board(self, board, revealed, flags, questionmarks):
for cell_x in range(GRID_WIDTH):
for cell_y in range(GRID_HEIGHT):
left, top = self.get_top_left_coordinates(cell_x, cell_y)
# Symbols not added on board creation must be drawn here: "unrevealed" boxes, flags, and question marks
if not revealed[cell_x][cell_y]:
# Draw a gray box over unrevealed cell, so value isn't affected but user can't see the value
pygame.draw.rect(SURFACE, CELL_COLOR, (left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
if flags[cell_x][cell_y]:
half = int(CELL_SIDE_LENGTH * 0.5) # Relative point halfway through cell
# top point, bottom left point, bottom right point
pygame.draw.polygon(SURFACE, FLAG_COLOR, [(half + left, top),
(left, top + CELL_SIDE_LENGTH - CELL_MARGIN/2),
(left + CELL_SIDE_LENGTH - CELL_MARGIN/2, top +
CELL_SIDE_LENGTH - CELL_MARGIN/2)])
elif questionmarks[cell_x][cell_y]:
quarter = int(CELL_SIDE_LENGTH * 0.25)
pygame.draw.rect(SURFACE, GRAY, (left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
fontsize = int(CELL_SIDE_LENGTH)
font = pygame.font.SysFont("times new roman", fontsize)
label = font.render("?", 1, BLACK)
SURFACE.blit(label, (left + quarter, top))
else: # Draw revealed cells
shape, color = self.get_shape_and_color(board, cell_x, cell_y)
self.draw_icon(shape, color, cell_x, cell_y)
# Draws icon passed to it in the stated cell
def draw_icon(self, shape, color, cell_x, cell_y):
# Relative point of quarter-way through cell
quarter = int(CELL_SIDE_LENGTH * 0.25)
left, top = self.get_top_left_coordinates(cell_x, cell_y) # Drawing of all images starts at top left corner
# Draw the shapes
if shape == CLEAR:
pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
elif shape == MINE:
pygame.draw.ellipse(SURFACE, color, (left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
        # Flag and question-mark shapes are drawn in draw_board because they are toggled by mouse events
else: # Clear with num
pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
fontsize = int(CELL_SIDE_LENGTH)
font = pygame.font.SysFont("times new roman", fontsize)
label = font.render(shape, 1, BLACK) # a cell with number corresponds to shapes "1", "2", etc.
SURFACE.blit(label, (left + quarter, top))
# Returns the shape and color of icon to be created in draw_icon method
@staticmethod
def get_shape_and_color(board, cell_x, cell_y):
# shape value for cell x, y is stored in board[x][y][0], color value in board[x][y][1]
return board[cell_x][cell_y][0], board[cell_x][cell_y][1]
# Draws a box around the cell the mouse is hovering over, 'highlighting' it
def highlight_cell(self, cell_x, cell_y):
left, top = self.get_top_left_coordinates(cell_x, cell_y)
# Changes with cell size, but line width is hard-set at 2px (last argument)
pygame.draw.rect(SURFACE, HIGHLIGHT_COLOR, (left - (CELL_MARGIN / 2), top - (CELL_MARGIN / 2),
CELL_SIDE_LENGTH + CELL_MARGIN, CELL_SIDE_LENGTH + CELL_MARGIN), 2)
# Reveals clear cells next to clear cell the user clicked (and clear cells next to those cells, etc.)
def reveal_cells(self, x, y, board, revealed, flags, questionmarks):
if revealed[x][y]: # If the cell is already revealed, do nothing
return
if flags[x][y]: # If the cell already has a flag on it, do nothing
return
revealed[x][y] = True
if board[x][y][0] != CLEAR:
return
if x > 0:
if y > 0:
self.reveal_cells(x - 1, y - 1, board, revealed, flags, questionmarks)
self.reveal_cells(x - 1, y, board, revealed, flags, questionmarks)
if y < GRID_HEIGHT - 1:
self.reveal_cells(x - 1, y + 1, board, revealed, flags, questionmarks)
if x < GRID_WIDTH - 1:
if y > 0:
self.reveal_cells(x + 1, y - 1, board, revealed, flags, questionmarks)
self.reveal_cells(x + 1, y, board, revealed, flags, questionmarks)
if y < GRID_HEIGHT - 1:
self.reveal_cells(x + 1, y + 1, board, revealed, flags, questionmarks)
if y > 0:
self.reveal_cells(x, y - 1, board, revealed, flags, questionmarks)
if y < GRID_HEIGHT - 1:
self.reveal_cells(x, y + 1, board, revealed, flags, questionmarks)
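    # The recursion above can exceed Python's default recursion limit (about
    # 1000 frames) on very large, mostly clear boards. An equivalent iterative
    # flood fill is sketched below (defined for illustration, not called by the
    # game; questionmarks is kept only to mirror the signature above):
    def reveal_cells_iterative(self, x, y, board, revealed, flags, questionmarks):
        stack = [(x, y)]
        while stack:
            cx, cy = stack.pop()
            if revealed[cx][cy] or flags[cx][cy]:
                continue
            revealed[cx][cy] = True
            if board[cx][cy][0] != CLEAR:
                continue  # numbered cells form the border of the fill
            for dx in (-1, 0, 1):
                for dy in (-1, 0, 1):
                    if dx == 0 and dy == 0:
                        continue
                    nx, ny = cx + dx, cy + dy
                    if 0 <= nx < GRID_WIDTH and 0 <= ny < GRID_HEIGHT:
                        stack.append((nx, ny))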
@staticmethod
def create_menu():
font = pygame.font.SysFont("times new roman", 20)
label = font.render(" High scores", 1, BLACK)
pygame.draw.rect(SURFACE, GRAY, (500, 125, 105, 50)) # view high scores
SURFACE.blit(label, (500, 135))
class Stopwatch:
def __init__(self):
self.seconds = 0
self.running = False
self.latest_time = None
def start(self):
if not self.running:
self.running = True
self.latest_time = time.time()
def get_seconds(self):
t1 = self.seconds
if self.running:
t1 += time.time() - self.latest_time
return int(t1)
def pause(self):
if self.running:
self.running = False
self.seconds += time.time() - self.latest_time
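# Usage sketch for Stopwatch (illustrative only; main() already drives it this way):
# sw = Stopwatch()
# sw.start()                   # begins (or resumes) timing
# elapsed = sw.get_seconds()   # whole seconds elapsed so far
# sw.pause()                   # stops accumulating until the next start()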
g = Game()
g.main()
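# Design note: constructing Game at module level starts the game on import; a
# common alternative (sketch, not applied above) is an entry-point guard:
# if __name__ == '__main__':
#     Game().main()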
|
flexible
|
{
"blob_id": "030bc0c7bdbbb09f722ffe4c82866726062f5317",
"index": 1962,
"step-1": "<mask token>\n\n\nclass Game:\n\n def __init__(self):\n pygame.init()\n global CLOCK, SURFACE\n CLOCK = pygame.time.Clock()\n SURFACE = pygame.display.set_mode((WINDOW_WIDTH, WINDOW_HEIGHT))\n self.mouse_x = 0\n self.mouse_y = 0\n pygame.display.set_caption('Minesweeper by Alyssa Moore 2017')\n self.board = self.get_board()\n self.revealed_cells = self.generate_data(False)\n self.flags = self.generate_data(False)\n self.questionmarks = self.generate_data(False)\n self.game_over = False\n self.timer = Stopwatch()\n SURFACE.fill(BG_COLOR)\n <mask token>\n <mask token>\n\n @staticmethod\n def generate_data(val):\n clear = []\n for i in range(GRID_WIDTH):\n clear.append([val] * GRID_HEIGHT)\n return clear\n <mask token>\n <mask token>\n\n def draw_board(self, board, revealed, flags, questionmarks):\n for cell_x in range(GRID_WIDTH):\n for cell_y in range(GRID_HEIGHT):\n left, top = self.get_top_left_coordinates(cell_x, cell_y)\n if not revealed[cell_x][cell_y]:\n pygame.draw.rect(SURFACE, CELL_COLOR, (left, top,\n CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n if flags[cell_x][cell_y]:\n half = int(CELL_SIDE_LENGTH * 0.5)\n pygame.draw.polygon(SURFACE, FLAG_COLOR, [(half +\n left, top), (left, top + CELL_SIDE_LENGTH - \n CELL_MARGIN / 2), (left + CELL_SIDE_LENGTH - \n CELL_MARGIN / 2, top + CELL_SIDE_LENGTH - \n CELL_MARGIN / 2)])\n elif questionmarks[cell_x][cell_y]:\n quarter = int(CELL_SIDE_LENGTH * 0.25)\n pygame.draw.rect(SURFACE, GRAY, (left, top,\n CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n fontsize = int(CELL_SIDE_LENGTH)\n font = pygame.font.SysFont('times new roman', fontsize)\n label = font.render('?', 1, BLACK)\n SURFACE.blit(label, (left + quarter, top))\n else:\n shape, color = self.get_shape_and_color(board, cell_x,\n cell_y)\n self.draw_icon(shape, color, cell_x, cell_y)\n <mask token>\n\n @staticmethod\n def get_shape_and_color(board, cell_x, cell_y):\n return board[cell_x][cell_y][0], board[cell_x][cell_y][1]\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Stopwatch:\n\n def __init__(self):\n self.seconds = 0\n self.running = False\n self.latest_time = None\n\n def start(self):\n if not self.running:\n self.running = True\n self.latest_time = time.time()\n\n def get_seconds(self):\n t1 = self.seconds\n if self.running:\n t1 += time.time() - self.latest_time\n return int(t1)\n\n def pause(self):\n if self.running:\n self.running = False\n self.seconds += time.time() - self.latest_time\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Game:\n\n def __init__(self):\n pygame.init()\n global CLOCK, SURFACE\n CLOCK = pygame.time.Clock()\n SURFACE = pygame.display.set_mode((WINDOW_WIDTH, WINDOW_HEIGHT))\n self.mouse_x = 0\n self.mouse_y = 0\n pygame.display.set_caption('Minesweeper by Alyssa Moore 2017')\n self.board = self.get_board()\n self.revealed_cells = self.generate_data(False)\n self.flags = self.generate_data(False)\n self.questionmarks = self.generate_data(False)\n self.game_over = False\n self.timer = Stopwatch()\n SURFACE.fill(BG_COLOR)\n <mask token>\n\n @staticmethod\n def get_board():\n icons = []\n mines = 0\n for x in range(GRID_WIDTH):\n for y in range(GRID_HEIGHT):\n if mines < NUM_MINES:\n icons.append((MINE, RED))\n mines += 1\n else:\n icons.append((CLEAR, WHITE))\n random.shuffle(icons)\n board = []\n for x in range(GRID_WIDTH):\n column = []\n for y in range(GRID_HEIGHT):\n column.append(icons[0])\n del icons[0]\n board.append(column)\n for x in range(GRID_WIDTH):\n for y in range(GRID_HEIGHT):\n mines = 0\n if x > 0:\n if y > 0:\n if board[x - 1][y - 1][0] == MINE:\n mines += 1\n if board[x - 1][y][0] == MINE:\n mines += 1\n if y < GRID_HEIGHT - 1:\n if board[x - 1][y + 1][0] == MINE:\n mines += 1\n if x < GRID_WIDTH - 1:\n if y > 0:\n if board[x + 1][y - 1][0] == MINE:\n mines += 1\n if board[x + 1][y][0] == MINE:\n mines += 1\n if y < GRID_HEIGHT - 1:\n if board[x + 1][y + 1][0] == MINE:\n mines += 1\n if y > 0:\n if board[x][y - 1][0] == MINE:\n mines += 1\n if y < GRID_HEIGHT - 1:\n if board[x][y + 1][0] == MINE:\n mines += 1\n if board[x][y][0] != MINE:\n if mines in range(1, 9):\n board[x][y] = str(mines), WHITE\n return board\n\n @staticmethod\n def generate_data(val):\n clear = []\n for i in range(GRID_WIDTH):\n clear.append([val] * GRID_HEIGHT)\n return clear\n\n @staticmethod\n def get_top_left_coordinates(row, column):\n left = row * (CELL_SIDE_LENGTH + CELL_MARGIN) + X_BOARD_MARGIN\n top = column * (CELL_SIDE_LENGTH + CELL_MARGIN) + Y_BOARD_MARGIN\n return left, top\n\n def get_cell_at_pixel(self, x, y):\n for cell_x in range(GRID_WIDTH):\n for cell_y in range(GRID_HEIGHT):\n left, top = self.get_top_left_coordinates(cell_x, cell_y)\n cell_rect = pygame.Rect(left, top, CELL_SIDE_LENGTH,\n CELL_SIDE_LENGTH)\n if cell_rect.collidepoint(x, y):\n return cell_x, cell_y\n return None, None\n\n def draw_board(self, board, revealed, flags, questionmarks):\n for cell_x in range(GRID_WIDTH):\n for cell_y in range(GRID_HEIGHT):\n left, top = self.get_top_left_coordinates(cell_x, cell_y)\n if not revealed[cell_x][cell_y]:\n pygame.draw.rect(SURFACE, CELL_COLOR, (left, top,\n CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n if flags[cell_x][cell_y]:\n half = int(CELL_SIDE_LENGTH * 0.5)\n pygame.draw.polygon(SURFACE, FLAG_COLOR, [(half +\n left, top), (left, top + CELL_SIDE_LENGTH - \n CELL_MARGIN / 2), (left + CELL_SIDE_LENGTH - \n CELL_MARGIN / 2, top + CELL_SIDE_LENGTH - \n CELL_MARGIN / 2)])\n elif questionmarks[cell_x][cell_y]:\n quarter = int(CELL_SIDE_LENGTH * 0.25)\n pygame.draw.rect(SURFACE, GRAY, (left, top,\n CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n fontsize = int(CELL_SIDE_LENGTH)\n font = pygame.font.SysFont('times new roman', fontsize)\n label = font.render('?', 1, BLACK)\n SURFACE.blit(label, (left + quarter, top))\n else:\n shape, color = self.get_shape_and_color(board, cell_x,\n cell_y)\n self.draw_icon(shape, color, cell_x, cell_y)\n\n def draw_icon(self, shape, color, cell_x, cell_y):\n quarter = int(CELL_SIDE_LENGTH * 0.25)\n left, top = 
self.get_top_left_coordinates(cell_x, cell_y)\n if shape == CLEAR:\n pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH,\n CELL_SIDE_LENGTH))\n elif shape == MINE:\n pygame.draw.ellipse(SURFACE, color, (left, top,\n CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n else:\n pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH,\n CELL_SIDE_LENGTH))\n fontsize = int(CELL_SIDE_LENGTH)\n font = pygame.font.SysFont('times new roman', fontsize)\n label = font.render(shape, 1, BLACK)\n SURFACE.blit(label, (left + quarter, top))\n\n @staticmethod\n def get_shape_and_color(board, cell_x, cell_y):\n return board[cell_x][cell_y][0], board[cell_x][cell_y][1]\n <mask token>\n\n def reveal_cells(self, x, y, board, revealed, flags, questionmarks):\n if revealed[x][y]:\n return\n if flags[x][y]:\n return\n revealed[x][y] = True\n if board[x][y][0] != CLEAR:\n return\n if x > 0:\n if y > 0:\n self.reveal_cells(x - 1, y - 1, board, revealed, flags,\n questionmarks)\n self.reveal_cells(x - 1, y, board, revealed, flags, questionmarks)\n if y < GRID_HEIGHT - 1:\n self.reveal_cells(x - 1, y + 1, board, revealed, flags,\n questionmarks)\n if x < GRID_WIDTH - 1:\n if y > 0:\n self.reveal_cells(x + 1, y - 1, board, revealed, flags,\n questionmarks)\n self.reveal_cells(x + 1, y, board, revealed, flags, questionmarks)\n if y < GRID_HEIGHT - 1:\n self.reveal_cells(x + 1, y + 1, board, revealed, flags,\n questionmarks)\n if y > 0:\n self.reveal_cells(x, y - 1, board, revealed, flags, questionmarks)\n if y < GRID_HEIGHT - 1:\n self.reveal_cells(x, y + 1, board, revealed, flags, questionmarks)\n\n @staticmethod\n def create_menu():\n font = pygame.font.SysFont('times new roman', 20)\n label = font.render(' High scores', 1, BLACK)\n pygame.draw.rect(SURFACE, GRAY, (500, 125, 105, 50))\n SURFACE.blit(label, (500, 135))\n\n\nclass Stopwatch:\n\n def __init__(self):\n self.seconds = 0\n self.running = False\n self.latest_time = None\n\n def start(self):\n if not self.running:\n self.running = True\n self.latest_time = time.time()\n\n def get_seconds(self):\n t1 = self.seconds\n if self.running:\n t1 += time.time() - self.latest_time\n return int(t1)\n\n def pause(self):\n if self.running:\n self.running = False\n self.seconds += time.time() - self.latest_time\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Game:\n\n def __init__(self):\n pygame.init()\n global CLOCK, SURFACE\n CLOCK = pygame.time.Clock()\n SURFACE = pygame.display.set_mode((WINDOW_WIDTH, WINDOW_HEIGHT))\n self.mouse_x = 0\n self.mouse_y = 0\n pygame.display.set_caption('Minesweeper by Alyssa Moore 2017')\n self.board = self.get_board()\n self.revealed_cells = self.generate_data(False)\n self.flags = self.generate_data(False)\n self.questionmarks = self.generate_data(False)\n self.game_over = False\n self.timer = Stopwatch()\n SURFACE.fill(BG_COLOR)\n\n def main(self):\n while True:\n left_click = False\n right_click = False\n SURFACE.fill(BG_COLOR)\n self.draw_board(self.board, self.revealed_cells, self.flags,\n self.questionmarks)\n self.create_menu()\n font = pygame.font.SysFont('times new roman', 25)\n self.timer.start()\n t1 = self.timer.get_seconds()\n label = font.render(str(t1), 1, MAGENTA)\n SURFACE.blit(label, (50, 50))\n for event in pygame.event.get():\n if event.type == pygame.locals.QUIT:\n pygame.quit()\n sys.exit()\n elif event.type == pygame.locals.MOUSEMOTION:\n self.mouse_x, self.mouse_y = event.pos\n elif event.type == pygame.locals.MOUSEBUTTONDOWN and event.button == 1:\n self.mouse_x, self.mouse_y = event.pos\n print(self.mouse_x, self.mouse_y)\n left_click = True\n elif event.type == pygame.locals.MOUSEBUTTONDOWN and event.button == 3:\n self.mouse_x, self.mouse_y = event.pos\n right_click = True\n if self.game_over and right_click:\n self.board = self.get_board()\n self.revealed_cells = self.generate_data(False)\n self.flags = self.generate_data(False)\n self.questionmarks = self.generate_data(False)\n self.game_over = False\n self.timer = Stopwatch()\n right_click = False\n if self.game_over:\n self.timer.pause()\n score = self.timer.get_seconds()\n a_x = X_BOARD_MARGIN + GRID_WIDTH / 4 * CELL_SIDE_LENGTH\n b_y = (Y_BOARD_MARGIN + Y_BOARD_MARGIN / 4 + GRID_HEIGHT *\n CELL_SIDE_LENGTH + GRID_HEIGHT * CELL_MARGIN)\n font = pygame.font.SysFont('times new roman', 25)\n if win:\n label = font.render('Congratulations, you won!', 1, GREEN)\n SURFACE.blit(label, (a_x - 75, b_y))\n label = font.render('Score: ' + str(score), 1, GREEN)\n SURFACE.blit(label, (a_x + 200, b_y))\n else:\n label = font.render('GAME OVER', 1, RED)\n SURFACE.blit(label, (a_x + 10, b_y))\n label = font.render('Press RIGHT mouse button', 1, YELLOW)\n SURFACE.blit(label, (a_x - 50, b_y + 25))\n cell_x, cell_y = self.get_cell_at_pixel(self.mouse_x, self.mouse_y)\n if cell_x is not None and cell_y is not None:\n if not self.revealed_cells[cell_x][cell_y\n ] and not self.game_over:\n self.highlight_cell(cell_x, cell_y)\n if not self.revealed_cells[cell_x][cell_y\n ] and left_click and not self.game_over:\n if not self.flags[cell_x][cell_y\n ] and not self.questionmarks[cell_x][cell_y]:\n self.flags[cell_x][cell_y] = False\n if self.board[cell_x][cell_y][0] == MINE:\n self.revealed_cells = self.generate_data(True)\n self.game_over = True\n elif self.board[cell_x][cell_y][0] == CLEAR:\n self.reveal_cells(cell_x, cell_y, self.board,\n self.revealed_cells, self.flags, self.\n questionmarks)\n else:\n self.revealed_cells[cell_x][cell_y] = True\n self.draw_board(self.board, self.revealed_cells,\n self.flags, self.questionmarks)\n if not self.revealed_cells[cell_x][cell_y\n ] and right_click and not self.game_over:\n if self.flags[cell_x][cell_y]:\n self.flags[cell_x][cell_y] = False\n self.questionmarks[cell_x][cell_y] = True\n elif self.questionmarks[cell_x][cell_y]:\n self.questionmarks[cell_x][cell_y] = 
False\n self.flags[cell_x][cell_y] = False\n else:\n self.flags[cell_x][cell_y] = True\n self.questionmarks[cell_x][cell_y] = False\n self.draw_board(self.board, self.revealed_cells, self.\n flags, self.questionmarks)\n win = True\n for x in range(GRID_WIDTH):\n for y in range(GRID_HEIGHT):\n if self.board[x][y][0] == MINE and not self.flags[x][y\n ] or self.board[x][y][0\n ] != MINE and not self.revealed_cells[x][y]:\n win = False\n if win:\n self.game_over = True\n pygame.display.update()\n CLOCK.tick(FPS)\n\n @staticmethod\n def get_board():\n icons = []\n mines = 0\n for x in range(GRID_WIDTH):\n for y in range(GRID_HEIGHT):\n if mines < NUM_MINES:\n icons.append((MINE, RED))\n mines += 1\n else:\n icons.append((CLEAR, WHITE))\n random.shuffle(icons)\n board = []\n for x in range(GRID_WIDTH):\n column = []\n for y in range(GRID_HEIGHT):\n column.append(icons[0])\n del icons[0]\n board.append(column)\n for x in range(GRID_WIDTH):\n for y in range(GRID_HEIGHT):\n mines = 0\n if x > 0:\n if y > 0:\n if board[x - 1][y - 1][0] == MINE:\n mines += 1\n if board[x - 1][y][0] == MINE:\n mines += 1\n if y < GRID_HEIGHT - 1:\n if board[x - 1][y + 1][0] == MINE:\n mines += 1\n if x < GRID_WIDTH - 1:\n if y > 0:\n if board[x + 1][y - 1][0] == MINE:\n mines += 1\n if board[x + 1][y][0] == MINE:\n mines += 1\n if y < GRID_HEIGHT - 1:\n if board[x + 1][y + 1][0] == MINE:\n mines += 1\n if y > 0:\n if board[x][y - 1][0] == MINE:\n mines += 1\n if y < GRID_HEIGHT - 1:\n if board[x][y + 1][0] == MINE:\n mines += 1\n if board[x][y][0] != MINE:\n if mines in range(1, 9):\n board[x][y] = str(mines), WHITE\n return board\n\n @staticmethod\n def generate_data(val):\n clear = []\n for i in range(GRID_WIDTH):\n clear.append([val] * GRID_HEIGHT)\n return clear\n\n @staticmethod\n def get_top_left_coordinates(row, column):\n left = row * (CELL_SIDE_LENGTH + CELL_MARGIN) + X_BOARD_MARGIN\n top = column * (CELL_SIDE_LENGTH + CELL_MARGIN) + Y_BOARD_MARGIN\n return left, top\n\n def get_cell_at_pixel(self, x, y):\n for cell_x in range(GRID_WIDTH):\n for cell_y in range(GRID_HEIGHT):\n left, top = self.get_top_left_coordinates(cell_x, cell_y)\n cell_rect = pygame.Rect(left, top, CELL_SIDE_LENGTH,\n CELL_SIDE_LENGTH)\n if cell_rect.collidepoint(x, y):\n return cell_x, cell_y\n return None, None\n\n def draw_board(self, board, revealed, flags, questionmarks):\n for cell_x in range(GRID_WIDTH):\n for cell_y in range(GRID_HEIGHT):\n left, top = self.get_top_left_coordinates(cell_x, cell_y)\n if not revealed[cell_x][cell_y]:\n pygame.draw.rect(SURFACE, CELL_COLOR, (left, top,\n CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n if flags[cell_x][cell_y]:\n half = int(CELL_SIDE_LENGTH * 0.5)\n pygame.draw.polygon(SURFACE, FLAG_COLOR, [(half +\n left, top), (left, top + CELL_SIDE_LENGTH - \n CELL_MARGIN / 2), (left + CELL_SIDE_LENGTH - \n CELL_MARGIN / 2, top + CELL_SIDE_LENGTH - \n CELL_MARGIN / 2)])\n elif questionmarks[cell_x][cell_y]:\n quarter = int(CELL_SIDE_LENGTH * 0.25)\n pygame.draw.rect(SURFACE, GRAY, (left, top,\n CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n fontsize = int(CELL_SIDE_LENGTH)\n font = pygame.font.SysFont('times new roman', fontsize)\n label = font.render('?', 1, BLACK)\n SURFACE.blit(label, (left + quarter, top))\n else:\n shape, color = self.get_shape_and_color(board, cell_x,\n cell_y)\n self.draw_icon(shape, color, cell_x, cell_y)\n\n def draw_icon(self, shape, color, cell_x, cell_y):\n quarter = int(CELL_SIDE_LENGTH * 0.25)\n left, top = self.get_top_left_coordinates(cell_x, cell_y)\n if shape == CLEAR:\n 
pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH,\n CELL_SIDE_LENGTH))\n elif shape == MINE:\n pygame.draw.ellipse(SURFACE, color, (left, top,\n CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n else:\n pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH,\n CELL_SIDE_LENGTH))\n fontsize = int(CELL_SIDE_LENGTH)\n font = pygame.font.SysFont('times new roman', fontsize)\n label = font.render(shape, 1, BLACK)\n SURFACE.blit(label, (left + quarter, top))\n\n @staticmethod\n def get_shape_and_color(board, cell_x, cell_y):\n return board[cell_x][cell_y][0], board[cell_x][cell_y][1]\n <mask token>\n\n def reveal_cells(self, x, y, board, revealed, flags, questionmarks):\n if revealed[x][y]:\n return\n if flags[x][y]:\n return\n revealed[x][y] = True\n if board[x][y][0] != CLEAR:\n return\n if x > 0:\n if y > 0:\n self.reveal_cells(x - 1, y - 1, board, revealed, flags,\n questionmarks)\n self.reveal_cells(x - 1, y, board, revealed, flags, questionmarks)\n if y < GRID_HEIGHT - 1:\n self.reveal_cells(x - 1, y + 1, board, revealed, flags,\n questionmarks)\n if x < GRID_WIDTH - 1:\n if y > 0:\n self.reveal_cells(x + 1, y - 1, board, revealed, flags,\n questionmarks)\n self.reveal_cells(x + 1, y, board, revealed, flags, questionmarks)\n if y < GRID_HEIGHT - 1:\n self.reveal_cells(x + 1, y + 1, board, revealed, flags,\n questionmarks)\n if y > 0:\n self.reveal_cells(x, y - 1, board, revealed, flags, questionmarks)\n if y < GRID_HEIGHT - 1:\n self.reveal_cells(x, y + 1, board, revealed, flags, questionmarks)\n\n @staticmethod\n def create_menu():\n font = pygame.font.SysFont('times new roman', 20)\n label = font.render(' High scores', 1, BLACK)\n pygame.draw.rect(SURFACE, GRAY, (500, 125, 105, 50))\n SURFACE.blit(label, (500, 135))\n\n\nclass Stopwatch:\n\n def __init__(self):\n self.seconds = 0\n self.running = False\n self.latest_time = None\n\n def start(self):\n if not self.running:\n self.running = True\n self.latest_time = time.time()\n\n def get_seconds(self):\n t1 = self.seconds\n if self.running:\n t1 += time.time() - self.latest_time\n return int(t1)\n\n def pause(self):\n if self.running:\n self.running = False\n self.seconds += time.time() - self.latest_time\n\n\n<mask token>\n",
"step-4": "import sys\nimport random\nimport pygame\nimport pygame.locals\nimport time\nCELL_SIDE_LENGTH = 40\nCELL_MARGIN = 2\nGRID_HEIGHT = 10\nGRID_WIDTH = 10\nX_BOARD_MARGIN = 50\nY_BOARD_MARGIN = 75\nMENU_MARGIN = 100\nDIFFICULTY = 0.1\nFPS = 30\nNUM_MINES = 1 + int(GRID_WIDTH * GRID_HEIGHT * DIFFICULTY)\nWINDOW_HEIGHT = (CELL_SIDE_LENGTH * GRID_HEIGHT + CELL_MARGIN * GRID_HEIGHT +\n Y_BOARD_MARGIN * 2)\nWINDOW_WIDTH = (CELL_SIDE_LENGTH * GRID_WIDTH + CELL_MARGIN * GRID_WIDTH + \n X_BOARD_MARGIN * 2 + MENU_MARGIN)\nRED = 255, 0, 0\nYELLOW = 255, 255, 0\nGREEN = 0, 255, 0\nMIDGREEN = 40, 190, 40\nCYAN = 0, 255, 255\nBLUE = 0, 0, 255\nDARKBLUE = 20, 20, 60\nMAGENTA = 255, 0, 255\nBLACK = 0, 0, 0\nWHITE = 255, 255, 255\nGRAY = 200, 200, 200\nBG_COLOR = DARKBLUE\nCELL_COLOR = GRAY\nHIGHLIGHT_COLOR = CYAN\nFLAG_COLOR = MIDGREEN\nFLAG = 'flag'\nMINE = 'mine'\nCLEAR = 'clear'\n\n\nclass Game:\n\n def __init__(self):\n pygame.init()\n global CLOCK, SURFACE\n CLOCK = pygame.time.Clock()\n SURFACE = pygame.display.set_mode((WINDOW_WIDTH, WINDOW_HEIGHT))\n self.mouse_x = 0\n self.mouse_y = 0\n pygame.display.set_caption('Minesweeper by Alyssa Moore 2017')\n self.board = self.get_board()\n self.revealed_cells = self.generate_data(False)\n self.flags = self.generate_data(False)\n self.questionmarks = self.generate_data(False)\n self.game_over = False\n self.timer = Stopwatch()\n SURFACE.fill(BG_COLOR)\n\n def main(self):\n while True:\n left_click = False\n right_click = False\n SURFACE.fill(BG_COLOR)\n self.draw_board(self.board, self.revealed_cells, self.flags,\n self.questionmarks)\n self.create_menu()\n font = pygame.font.SysFont('times new roman', 25)\n self.timer.start()\n t1 = self.timer.get_seconds()\n label = font.render(str(t1), 1, MAGENTA)\n SURFACE.blit(label, (50, 50))\n for event in pygame.event.get():\n if event.type == pygame.locals.QUIT:\n pygame.quit()\n sys.exit()\n elif event.type == pygame.locals.MOUSEMOTION:\n self.mouse_x, self.mouse_y = event.pos\n elif event.type == pygame.locals.MOUSEBUTTONDOWN and event.button == 1:\n self.mouse_x, self.mouse_y = event.pos\n print(self.mouse_x, self.mouse_y)\n left_click = True\n elif event.type == pygame.locals.MOUSEBUTTONDOWN and event.button == 3:\n self.mouse_x, self.mouse_y = event.pos\n right_click = True\n if self.game_over and right_click:\n self.board = self.get_board()\n self.revealed_cells = self.generate_data(False)\n self.flags = self.generate_data(False)\n self.questionmarks = self.generate_data(False)\n self.game_over = False\n self.timer = Stopwatch()\n right_click = False\n if self.game_over:\n self.timer.pause()\n score = self.timer.get_seconds()\n a_x = X_BOARD_MARGIN + GRID_WIDTH / 4 * CELL_SIDE_LENGTH\n b_y = (Y_BOARD_MARGIN + Y_BOARD_MARGIN / 4 + GRID_HEIGHT *\n CELL_SIDE_LENGTH + GRID_HEIGHT * CELL_MARGIN)\n font = pygame.font.SysFont('times new roman', 25)\n if win:\n label = font.render('Congratulations, you won!', 1, GREEN)\n SURFACE.blit(label, (a_x - 75, b_y))\n label = font.render('Score: ' + str(score), 1, GREEN)\n SURFACE.blit(label, (a_x + 200, b_y))\n else:\n label = font.render('GAME OVER', 1, RED)\n SURFACE.blit(label, (a_x + 10, b_y))\n label = font.render('Press RIGHT mouse button', 1, YELLOW)\n SURFACE.blit(label, (a_x - 50, b_y + 25))\n cell_x, cell_y = self.get_cell_at_pixel(self.mouse_x, self.mouse_y)\n if cell_x is not None and cell_y is not None:\n if not self.revealed_cells[cell_x][cell_y\n ] and not self.game_over:\n self.highlight_cell(cell_x, cell_y)\n if not 
self.revealed_cells[cell_x][cell_y\n ] and left_click and not self.game_over:\n if not self.flags[cell_x][cell_y\n ] and not self.questionmarks[cell_x][cell_y]:\n self.flags[cell_x][cell_y] = False\n if self.board[cell_x][cell_y][0] == MINE:\n self.revealed_cells = self.generate_data(True)\n self.game_over = True\n elif self.board[cell_x][cell_y][0] == CLEAR:\n self.reveal_cells(cell_x, cell_y, self.board,\n self.revealed_cells, self.flags, self.\n questionmarks)\n else:\n self.revealed_cells[cell_x][cell_y] = True\n self.draw_board(self.board, self.revealed_cells,\n self.flags, self.questionmarks)\n if not self.revealed_cells[cell_x][cell_y\n ] and right_click and not self.game_over:\n if self.flags[cell_x][cell_y]:\n self.flags[cell_x][cell_y] = False\n self.questionmarks[cell_x][cell_y] = True\n elif self.questionmarks[cell_x][cell_y]:\n self.questionmarks[cell_x][cell_y] = False\n self.flags[cell_x][cell_y] = False\n else:\n self.flags[cell_x][cell_y] = True\n self.questionmarks[cell_x][cell_y] = False\n self.draw_board(self.board, self.revealed_cells, self.\n flags, self.questionmarks)\n win = True\n for x in range(GRID_WIDTH):\n for y in range(GRID_HEIGHT):\n if self.board[x][y][0] == MINE and not self.flags[x][y\n ] or self.board[x][y][0\n ] != MINE and not self.revealed_cells[x][y]:\n win = False\n if win:\n self.game_over = True\n pygame.display.update()\n CLOCK.tick(FPS)\n\n @staticmethod\n def get_board():\n icons = []\n mines = 0\n for x in range(GRID_WIDTH):\n for y in range(GRID_HEIGHT):\n if mines < NUM_MINES:\n icons.append((MINE, RED))\n mines += 1\n else:\n icons.append((CLEAR, WHITE))\n random.shuffle(icons)\n board = []\n for x in range(GRID_WIDTH):\n column = []\n for y in range(GRID_HEIGHT):\n column.append(icons[0])\n del icons[0]\n board.append(column)\n for x in range(GRID_WIDTH):\n for y in range(GRID_HEIGHT):\n mines = 0\n if x > 0:\n if y > 0:\n if board[x - 1][y - 1][0] == MINE:\n mines += 1\n if board[x - 1][y][0] == MINE:\n mines += 1\n if y < GRID_HEIGHT - 1:\n if board[x - 1][y + 1][0] == MINE:\n mines += 1\n if x < GRID_WIDTH - 1:\n if y > 0:\n if board[x + 1][y - 1][0] == MINE:\n mines += 1\n if board[x + 1][y][0] == MINE:\n mines += 1\n if y < GRID_HEIGHT - 1:\n if board[x + 1][y + 1][0] == MINE:\n mines += 1\n if y > 0:\n if board[x][y - 1][0] == MINE:\n mines += 1\n if y < GRID_HEIGHT - 1:\n if board[x][y + 1][0] == MINE:\n mines += 1\n if board[x][y][0] != MINE:\n if mines in range(1, 9):\n board[x][y] = str(mines), WHITE\n return board\n\n @staticmethod\n def generate_data(val):\n clear = []\n for i in range(GRID_WIDTH):\n clear.append([val] * GRID_HEIGHT)\n return clear\n\n @staticmethod\n def get_top_left_coordinates(row, column):\n left = row * (CELL_SIDE_LENGTH + CELL_MARGIN) + X_BOARD_MARGIN\n top = column * (CELL_SIDE_LENGTH + CELL_MARGIN) + Y_BOARD_MARGIN\n return left, top\n\n def get_cell_at_pixel(self, x, y):\n for cell_x in range(GRID_WIDTH):\n for cell_y in range(GRID_HEIGHT):\n left, top = self.get_top_left_coordinates(cell_x, cell_y)\n cell_rect = pygame.Rect(left, top, CELL_SIDE_LENGTH,\n CELL_SIDE_LENGTH)\n if cell_rect.collidepoint(x, y):\n return cell_x, cell_y\n return None, None\n\n def draw_board(self, board, revealed, flags, questionmarks):\n for cell_x in range(GRID_WIDTH):\n for cell_y in range(GRID_HEIGHT):\n left, top = self.get_top_left_coordinates(cell_x, cell_y)\n if not revealed[cell_x][cell_y]:\n pygame.draw.rect(SURFACE, CELL_COLOR, (left, top,\n CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n if flags[cell_x][cell_y]:\n half = 
int(CELL_SIDE_LENGTH * 0.5)\n pygame.draw.polygon(SURFACE, FLAG_COLOR, [(half +\n left, top), (left, top + CELL_SIDE_LENGTH - \n CELL_MARGIN / 2), (left + CELL_SIDE_LENGTH - \n CELL_MARGIN / 2, top + CELL_SIDE_LENGTH - \n CELL_MARGIN / 2)])\n elif questionmarks[cell_x][cell_y]:\n quarter = int(CELL_SIDE_LENGTH * 0.25)\n pygame.draw.rect(SURFACE, GRAY, (left, top,\n CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n fontsize = int(CELL_SIDE_LENGTH)\n font = pygame.font.SysFont('times new roman', fontsize)\n label = font.render('?', 1, BLACK)\n SURFACE.blit(label, (left + quarter, top))\n else:\n shape, color = self.get_shape_and_color(board, cell_x,\n cell_y)\n self.draw_icon(shape, color, cell_x, cell_y)\n\n def draw_icon(self, shape, color, cell_x, cell_y):\n quarter = int(CELL_SIDE_LENGTH * 0.25)\n left, top = self.get_top_left_coordinates(cell_x, cell_y)\n if shape == CLEAR:\n pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH,\n CELL_SIDE_LENGTH))\n elif shape == MINE:\n pygame.draw.ellipse(SURFACE, color, (left, top,\n CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n else:\n pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH,\n CELL_SIDE_LENGTH))\n fontsize = int(CELL_SIDE_LENGTH)\n font = pygame.font.SysFont('times new roman', fontsize)\n label = font.render(shape, 1, BLACK)\n SURFACE.blit(label, (left + quarter, top))\n\n @staticmethod\n def get_shape_and_color(board, cell_x, cell_y):\n return board[cell_x][cell_y][0], board[cell_x][cell_y][1]\n\n def highlight_cell(self, cell_x, cell_y):\n left, top = self.get_top_left_coordinates(cell_x, cell_y)\n pygame.draw.rect(SURFACE, HIGHLIGHT_COLOR, (left - CELL_MARGIN / 2,\n top - CELL_MARGIN / 2, CELL_SIDE_LENGTH + CELL_MARGIN, \n CELL_SIDE_LENGTH + CELL_MARGIN), 2)\n\n def reveal_cells(self, x, y, board, revealed, flags, questionmarks):\n if revealed[x][y]:\n return\n if flags[x][y]:\n return\n revealed[x][y] = True\n if board[x][y][0] != CLEAR:\n return\n if x > 0:\n if y > 0:\n self.reveal_cells(x - 1, y - 1, board, revealed, flags,\n questionmarks)\n self.reveal_cells(x - 1, y, board, revealed, flags, questionmarks)\n if y < GRID_HEIGHT - 1:\n self.reveal_cells(x - 1, y + 1, board, revealed, flags,\n questionmarks)\n if x < GRID_WIDTH - 1:\n if y > 0:\n self.reveal_cells(x + 1, y - 1, board, revealed, flags,\n questionmarks)\n self.reveal_cells(x + 1, y, board, revealed, flags, questionmarks)\n if y < GRID_HEIGHT - 1:\n self.reveal_cells(x + 1, y + 1, board, revealed, flags,\n questionmarks)\n if y > 0:\n self.reveal_cells(x, y - 1, board, revealed, flags, questionmarks)\n if y < GRID_HEIGHT - 1:\n self.reveal_cells(x, y + 1, board, revealed, flags, questionmarks)\n\n @staticmethod\n def create_menu():\n font = pygame.font.SysFont('times new roman', 20)\n label = font.render(' High scores', 1, BLACK)\n pygame.draw.rect(SURFACE, GRAY, (500, 125, 105, 50))\n SURFACE.blit(label, (500, 135))\n\n\nclass Stopwatch:\n\n def __init__(self):\n self.seconds = 0\n self.running = False\n self.latest_time = None\n\n def start(self):\n if not self.running:\n self.running = True\n self.latest_time = time.time()\n\n def get_seconds(self):\n t1 = self.seconds\n if self.running:\n t1 += time.time() - self.latest_time\n return int(t1)\n\n def pause(self):\n if self.running:\n self.running = False\n self.seconds += time.time() - self.latest_time\n\n\ng = Game()\ng.main()\n",
"step-5": "import sys\nimport random\nimport pygame\nimport pygame.locals\nimport time\n\n# TODO high scores, difficulties\n\n# Absolutes (in pixels where not otherwise stated)\nCELL_SIDE_LENGTH = 40 # Side length of each cell\nCELL_MARGIN = 2 # Gap between cells\nGRID_HEIGHT = 10 # How many cells are in the grid\nGRID_WIDTH = 10\nX_BOARD_MARGIN = 50 # Gap between grid and sides of board\nY_BOARD_MARGIN = 75\nMENU_MARGIN = 100 # Amount of space on the right dedicated to the menu\nDIFFICULTY = 0.1 # Ratio of bombs (10% by default)\nFPS = 30 # frames per second (window refresh speed)\n\n# Relatives (so board size can easily be changed)\nNUM_MINES = 1 + int(GRID_WIDTH * GRID_HEIGHT * DIFFICULTY) # Default about 10% of the board is mines\nWINDOW_HEIGHT = (CELL_SIDE_LENGTH * GRID_HEIGHT) + (CELL_MARGIN * GRID_HEIGHT) + (Y_BOARD_MARGIN * 2)\nWINDOW_WIDTH = (CELL_SIDE_LENGTH * GRID_WIDTH) + (CELL_MARGIN * GRID_WIDTH) + (X_BOARD_MARGIN * 2) + MENU_MARGIN\n\n# R G B (not all used, but kept so theme can easily be changed)\nRED = (255, 0, 0)\nYELLOW = (255, 255, 0)\nGREEN = (0, 255, 0)\nMIDGREEN = (40, 190, 40)\nCYAN = (0, 255, 255)\nBLUE = (0, 0, 255)\nDARKBLUE = (20, 20, 60)\nMAGENTA = (255, 0, 255)\nBLACK = (0, 0, 0)\nWHITE = (255, 255, 255)\nGRAY = (200, 200, 200)\n\nBG_COLOR = DARKBLUE # Background color\nCELL_COLOR = GRAY # Universal cover color\nHIGHLIGHT_COLOR = CYAN # Cell the cursor is currently hovering over\nFLAG_COLOR = MIDGREEN\n\n# Symbols\nFLAG = 'flag'\nMINE = 'mine'\nCLEAR = 'clear'\n\n\nclass Game:\n def __init__(self):\n pygame.init()\n global CLOCK, SURFACE\n CLOCK = pygame.time.Clock()\n SURFACE = pygame.display.set_mode((WINDOW_WIDTH, WINDOW_HEIGHT))\n\n self.mouse_x = 0 # Stores x-coordinate of mouse event\n self.mouse_y = 0 # Stores y-coordinate of mouse event\n pygame.display.set_caption('Minesweeper by Alyssa Moore 2017')\n\n self.board = self.get_board()\n self.revealed_cells = self.generate_data(False)\n self.flags = self.generate_data(False)\n self.questionmarks = self.generate_data(False)\n self.game_over = False\n self.timer = Stopwatch()\n\n SURFACE.fill(BG_COLOR)\n\n def main(self):\n\n while True:\n left_click = False\n right_click = False\n\n SURFACE.fill(BG_COLOR)\n self.draw_board(self.board, self.revealed_cells, self.flags, self.questionmarks)\n self.create_menu()\n\n font = pygame.font.SysFont(\"times new roman\", 25)\n\n # Timer (will be used to implement high scores)\n self.timer.start()\n t1 = self.timer.get_seconds()\n label = font.render(str(t1), 1, MAGENTA)\n SURFACE.blit(label, (50, 50))\n\n # Mouse event handling\n for event in pygame.event.get():\n if event.type == pygame.locals.QUIT:\n pygame.quit()\n sys.exit() # Even if the window closes, we still need to manually stop the processes\n elif event.type == pygame.locals.MOUSEMOTION:\n self.mouse_x, self.mouse_y = event.pos # For hovering info\n elif event.type == pygame.locals.MOUSEBUTTONDOWN and event.button == 1: # Left click\n self.mouse_x, self.mouse_y = event.pos\n print(self.mouse_x, self.mouse_y)\n left_click = True\n elif event.type == pygame.locals.MOUSEBUTTONDOWN and event.button == 3: # Right click\n self.mouse_x, self.mouse_y = event.pos\n right_click = True\n\n # If user decided to start over, reinitialize game\n if self.game_over and right_click:\n self.board = self.get_board()\n self.revealed_cells = self.generate_data(False)\n self.flags = self.generate_data(False)\n self.questionmarks = self.generate_data(False)\n self.game_over = False\n self.timer = Stopwatch()\n right_click = 
False\n\n # TODO tweak spacing on text\n if self.game_over:\n self.timer.pause()\n score = self.timer.get_seconds()\n\n a_x = X_BOARD_MARGIN + ((GRID_WIDTH / 4) * CELL_SIDE_LENGTH)\n b_y = Y_BOARD_MARGIN + (Y_BOARD_MARGIN / 4) + (GRID_HEIGHT * CELL_SIDE_LENGTH) + (GRID_HEIGHT * CELL_MARGIN)\n font = pygame.font.SysFont(\"times new roman\", 25)\n if win:\n label = font.render('Congratulations, you won!', 1, GREEN)\n SURFACE.blit(label, (a_x - 75, b_y))\n label = font.render('Score: ' + str(score), 1, GREEN)\n SURFACE.blit(label, (a_x + 200, b_y))\n else:\n label = font.render('GAME OVER', 1, RED)\n SURFACE.blit(label, (a_x + 10, b_y))\n label = font.render('Press RIGHT mouse button', 1, YELLOW)\n SURFACE.blit(label, (a_x - 50, b_y + 25))\n\n cell_x, cell_y = self.get_cell_at_pixel(self.mouse_x, self.mouse_y)\n if cell_x is not None and cell_y is not None: # If mouse is hovering over a cell during mouse event\n\n # Highlight cell\n if not self.revealed_cells[cell_x][cell_y] and not self.game_over:\n self.highlight_cell(cell_x, cell_y)\n\n # Digging somewhere\n if not self.revealed_cells[cell_x][cell_y] and left_click and not self.game_over:\n\n # So you can't accidentally click a flagged/question mark space\n if not self.flags[cell_x][cell_y] and not self.questionmarks[cell_x][cell_y]:\n\n self.flags[cell_x][cell_y] = False\n\n if self.board[cell_x][cell_y][0] == MINE: # If you dig a mine, reveal all cells & game over\n self.revealed_cells = self.generate_data(True)\n self.game_over = True\n\n elif self.board[cell_x][cell_y][0] == CLEAR: # If you dig a clear cell, reveal that cell\n self.reveal_cells(cell_x, cell_y, self.board, self.revealed_cells, self.flags, self.questionmarks)\n\n else:\n self.revealed_cells[cell_x][cell_y] = True # Set the cell as revealed\n\n # Redraw board after mouse event\n self.draw_board(self.board, self.revealed_cells, self.flags, self.questionmarks)\n\n # Placing a flag- if flag already there, change flag to question mark.\n # If question mark already there, turn to nothing. 
If nothing there, turn on flag\n if not self.revealed_cells[cell_x][cell_y] and right_click and not self.game_over:\n if self.flags[cell_x][cell_y]:\n self.flags[cell_x][cell_y] = False\n self.questionmarks[cell_x][cell_y] = True\n elif self.questionmarks[cell_x][cell_y]:\n self.questionmarks[cell_x][cell_y] = False\n self.flags[cell_x][cell_y] = False\n else:\n self.flags[cell_x][cell_y] = True\n self.questionmarks[cell_x][cell_y] = False\n\n # Flag is drawn in this method call\n self.draw_board(self.board, self.revealed_cells, self.flags, self.questionmarks)\n\n # This block decides whether or not the player has won yet after a mouse event\n win = True\n for x in range(GRID_WIDTH): # If a cell is a mine and not flagged, or if a cell is clear\n for y in range(GRID_HEIGHT): # but not revealed, then the game is not yet over\n if (self.board[x][y][0] == MINE and not self.flags[x][y]) or (\n self.board[x][y][0] != MINE and not self.revealed_cells[x][y]):\n win = False\n\n if win:\n self.game_over = True\n\n # Redraw the screen and wait for clock tick\n pygame.display.update()\n CLOCK.tick(FPS)\n\n @staticmethod\n def get_board():\n icons = []\n mines = 0\n\n # Bottom of board is made of only mines and clear cells, which is then selectively covered for gameplay\n # Making randomized array\n for x in range(GRID_WIDTH):\n for y in range(GRID_HEIGHT):\n if mines < NUM_MINES:\n icons.append((MINE, RED))\n mines += 1\n else:\n icons.append((CLEAR, WHITE))\n random.shuffle(icons)\n\n # Create static under-board\n board = []\n for x in range(GRID_WIDTH):\n column = []\n for y in range(GRID_HEIGHT):\n column.append(icons[0])\n del icons[0] # so the next icon[0] is the one after this\n board.append(column)\n\n # This block determines how many mines are around each cell, and adds the number to the board's array\n for x in range(GRID_WIDTH):\n for y in range(GRID_HEIGHT):\n mines = 0\n\n if x > 0:\n if y > 0: # If not on the left edge AND not on top edge\n if board[x - 1][y - 1][0] == MINE:\n mines += 1\n if board[x - 1][y][0] == MINE:\n mines += 1\n if y < GRID_HEIGHT - 1:\n if board[x - 1][y + 1][0] == MINE:\n mines += 1\n\n if x < GRID_WIDTH - 1:\n if y > 0: # If not on right edge AND not on top edge\n if board[x + 1][y - 1][0] == MINE:\n mines += 1\n if board[x + 1][y][0] == MINE:\n mines += 1\n if y < GRID_HEIGHT - 1:\n if board[x + 1][y + 1][0] == MINE:\n mines += 1\n\n if y > 0: # If not on right or left edge AND not on top edge\n if board[x][y - 1][0] == MINE:\n mines += 1\n\n if y < GRID_HEIGHT - 1: # If not on riht or left edge AND on bottom edge\n if board[x][y + 1][0] == MINE:\n mines += 1\n\n # If the cell is clear and there are mines around it, add the number of mines to board array\n if board[x][y][0] != MINE:\n if mines in range(1, 9):\n board[x][y] = (str(mines), WHITE)\n\n return board\n\n # Used to show full board on game over & reset board on game start\n @staticmethod\n def generate_data(val):\n clear = []\n for i in range(GRID_WIDTH):\n clear.append([val] * GRID_HEIGHT)\n return clear\n\n # Convert row, column coordinates into x, y pixel coordinates (for drawing shapes)\n @staticmethod\n def get_top_left_coordinates(row, column):\n left = row * (CELL_SIDE_LENGTH + CELL_MARGIN) + X_BOARD_MARGIN\n top = column * (CELL_SIDE_LENGTH + CELL_MARGIN) + Y_BOARD_MARGIN\n return left, top\n\n # Convert x, y pixel coordinates to row, column coordinates (for mouse hovering)\n def get_cell_at_pixel(self, x, y):\n for cell_x in range(GRID_WIDTH):\n for cell_y in range(GRID_HEIGHT):\n left, top = 
self.get_top_left_coordinates(cell_x, cell_y)\n cell_rect = pygame.Rect(left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH)\n if cell_rect.collidepoint(x, y): # If currently hovering over a cell\n return cell_x, cell_y\n return None, None # If not currently hovering over a cell\n\n # Redraws board after mouse event\n def draw_board(self, board, revealed, flags, questionmarks):\n for cell_x in range(GRID_WIDTH):\n for cell_y in range(GRID_HEIGHT):\n left, top = self.get_top_left_coordinates(cell_x, cell_y)\n\n # Symbols not added on board creation must be drawn here: \"unrevealed\" boxes, flags, and question marks\n if not revealed[cell_x][cell_y]:\n # Draw a gray box over unrevealed cell, so value isn't affected but user can't see the value\n pygame.draw.rect(SURFACE, CELL_COLOR, (left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n\n if flags[cell_x][cell_y]:\n half = int(CELL_SIDE_LENGTH * 0.5) # Relative point halfway through cell\n # top point, bottom left point, bottom right point\n pygame.draw.polygon(SURFACE, FLAG_COLOR, [(half + left, top),\n (left, top + CELL_SIDE_LENGTH - CELL_MARGIN/2),\n (left + CELL_SIDE_LENGTH - CELL_MARGIN/2, top +\n CELL_SIDE_LENGTH - CELL_MARGIN/2)])\n elif questionmarks[cell_x][cell_y]:\n quarter = int(CELL_SIDE_LENGTH * 0.25)\n pygame.draw.rect(SURFACE, GRAY, (left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n fontsize = int(CELL_SIDE_LENGTH)\n font = pygame.font.SysFont(\"times new roman\", fontsize)\n label = font.render(\"?\", 1, BLACK)\n SURFACE.blit(label, (left + quarter, top))\n\n else: # Draw revealed cells\n shape, color = self.get_shape_and_color(board, cell_x, cell_y)\n self.draw_icon(shape, color, cell_x, cell_y)\n\n # Draws icon passed to it in the stated cell\n def draw_icon(self, shape, color, cell_x, cell_y):\n\n # Relative point of quarter-way through cell\n quarter = int(CELL_SIDE_LENGTH * 0.25)\n\n left, top = self.get_top_left_coordinates(cell_x, cell_y) # Drawing of all images starts at top left corner\n\n # Draw the shapes\n if shape == CLEAR:\n pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n\n elif shape == MINE:\n pygame.draw.ellipse(SURFACE, color, (left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n\n # Flag shape & question mark in draw_board because they are activated via mouse event\n\n else: # Clear with num\n pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n fontsize = int(CELL_SIDE_LENGTH)\n font = pygame.font.SysFont(\"times new roman\", fontsize)\n label = font.render(shape, 1, BLACK) # a cell with number corresponds to shapes \"1\", \"2\", etc.\n SURFACE.blit(label, (left + quarter, top))\n\n # Returns the shape and color of icon to be created in draw_icon method\n @staticmethod\n def get_shape_and_color(board, cell_x, cell_y):\n # shape value for cell x, y is stored in board[x][y][0], color value in board[x][y][1]\n return board[cell_x][cell_y][0], board[cell_x][cell_y][1]\n\n # Draws a box around the cell the mouse is hovering over, 'highlighting' it\n def highlight_cell(self, cell_x, cell_y):\n left, top = self.get_top_left_coordinates(cell_x, cell_y)\n # Changes with cell size, but line width is hard-set at 2px (last argument)\n pygame.draw.rect(SURFACE, HIGHLIGHT_COLOR, (left - (CELL_MARGIN / 2), top - (CELL_MARGIN / 2),\n CELL_SIDE_LENGTH + CELL_MARGIN, CELL_SIDE_LENGTH + CELL_MARGIN), 2)\n\n # Reveals clear cells next to clear cell the user clicked (and clear cells next to those cells, etc.)\n def reveal_cells(self, x, y, board, revealed, flags, 
questionmarks):\n if revealed[x][y]: # If the cell is already revealed, do nothing\n return\n if flags[x][y]: # If the cell already has a flag on it, do nothing\n return\n revealed[x][y] = True\n if board[x][y][0] != CLEAR:\n return\n if x > 0:\n if y > 0:\n self.reveal_cells(x - 1, y - 1, board, revealed, flags, questionmarks)\n self.reveal_cells(x - 1, y, board, revealed, flags, questionmarks)\n if y < GRID_HEIGHT - 1:\n self.reveal_cells(x - 1, y + 1, board, revealed, flags, questionmarks)\n\n if x < GRID_WIDTH - 1:\n if y > 0:\n self.reveal_cells(x + 1, y - 1, board, revealed, flags, questionmarks)\n self.reveal_cells(x + 1, y, board, revealed, flags, questionmarks)\n if y < GRID_HEIGHT - 1:\n self.reveal_cells(x + 1, y + 1, board, revealed, flags, questionmarks)\n\n if y > 0:\n self.reveal_cells(x, y - 1, board, revealed, flags, questionmarks)\n\n if y < GRID_HEIGHT - 1:\n self.reveal_cells(x, y + 1, board, revealed, flags, questionmarks)\n\n @staticmethod\n def create_menu():\n font = pygame.font.SysFont(\"times new roman\", 20)\n label = font.render(\" High scores\", 1, BLACK)\n pygame.draw.rect(SURFACE, GRAY, (500, 125, 105, 50)) # view high scores\n SURFACE.blit(label, (500, 135))\n\n\nclass Stopwatch:\n def __init__(self):\n self.seconds = 0\n self.running = False\n self.latest_time = None\n\n def start(self):\n if not self.running:\n self.running = True\n self.latest_time = time.time()\n\n def get_seconds(self):\n t1 = self.seconds\n if self.running:\n t1 += time.time() - self.latest_time\n return int(t1)\n\n def pause(self):\n if self.running:\n self.running = False\n self.seconds += time.time() - self.latest_time\n\n\ng = Game()\ng.main()\n",
"step-ids": [
10,
16,
17,
21,
22
]
}
|
[
10,
16,
17,
21,
22
] |
<mask token>
class SwitchingBatchSampler(Sampler):
    <mask token>
def __iter__(self):
second_size = self.data_len - self.first_size
self.first_iter = iter(torch.randperm(self.first_size))
self.second_iter = iter(torch.randperm(second_size) + self.first_size)
i = 0
count_first = 0
count_second = 0
batch = []
while count_first + count_second < self.data_len:
if self.turn == 0:
if count_first == self.first_size:
self.turn = 1
if len(batch) > 0 and not self.drop_last:
yield batch
batch = []
else:
batch.append(next(self.first_iter))
count_first += 1
i += 1
elif count_second == self.data_len - self.first_size:
self.turn = 0
if len(batch) > 0 and not self.drop_last:
yield batch
batch = []
else:
batch.append(next(self.second_iter))
count_second += 1
i += 1
if i != 0 and i % self.batch_size == 0:
yield batch
batch = []
if (count_first != self.first_size and count_second !=
second_size and random.uniform(0, 1) > 0.5):
self.turn = (self.turn + 1) % 2
if len(batch) > 0 and not self.drop_last:
yield batch
    <mask token>
<|reserved_special_token_1|>
<mask token>
class SwitchingBatchSampler(Sampler):
    <mask token>
def __iter__(self):
second_size = self.data_len - self.first_size
self.first_iter = iter(torch.randperm(self.first_size))
self.second_iter = iter(torch.randperm(second_size) + self.first_size)
i = 0
count_first = 0
count_second = 0
batch = []
while count_first + count_second < self.data_len:
if self.turn == 0:
if count_first == self.first_size:
self.turn = 1
if len(batch) > 0 and not self.drop_last:
yield batch
batch = []
else:
batch.append(next(self.first_iter))
count_first += 1
i += 1
elif count_second == self.data_len - self.first_size:
self.turn = 0
if len(batch) > 0 and not self.drop_last:
yield batch
batch = []
else:
batch.append(next(self.second_iter))
count_second += 1
i += 1
if i != 0 and i % self.batch_size == 0:
yield batch
batch = []
if (count_first != self.first_size and count_second !=
second_size and random.uniform(0, 1) > 0.5):
self.turn = (self.turn + 1) % 2
if len(batch) > 0 and not self.drop_last:
yield batch
def __len__(self):
        if self.drop_last:
            return (self.first_size // self.batch_size
                    + (self.data_len - self.first_size) // self.batch_size)
        else:
            return ((self.first_size + self.batch_size - 1) // self.batch_size
                    + (self.data_len - self.first_size + self.batch_size - 1)
                    // self.batch_size)
<|reserved_special_token_1|>
<mask token>
class SwitchingBatchSampler(Sampler):
def __init__(self, data_source, batch_size, drop_last=False):
self.data_source = data_source
self.batch_size = batch_size
self.drop_last = drop_last
self.data_len = len(self.data_source)
count = 0
for i in range(self.data_len):
if self.data_source.imgs[i][1] == 1:
break
else:
count += 1
print('Total Images: %d [Class 0: %d, Class 1: %d]\n' % (self.
data_len, count, self.data_len - count))
self.first_size = count
if random.uniform(0, 1) > 0.5:
self.turn = 0
else:
self.turn = 1
def __iter__(self):
second_size = self.data_len - self.first_size
self.first_iter = iter(torch.randperm(self.first_size))
self.second_iter = iter(torch.randperm(second_size) + self.first_size)
i = 0
count_first = 0
count_second = 0
batch = []
while count_first + count_second < self.data_len:
if self.turn == 0:
if count_first == self.first_size:
self.turn = 1
if len(batch) > 0 and not self.drop_last:
yield batch
batch = []
else:
batch.append(next(self.first_iter))
count_first += 1
i += 1
elif count_second == self.data_len - self.first_size:
self.turn = 0
if len(batch) > 0 and not self.drop_last:
yield batch
batch = []
else:
batch.append(next(self.second_iter))
count_second += 1
i += 1
if i != 0 and i % self.batch_size == 0:
yield batch
batch = []
if (count_first != self.first_size and count_second !=
second_size and random.uniform(0, 1) > 0.5):
self.turn = (self.turn + 1) % 2
if len(batch) > 0 and not self.drop_last:
yield batch
def __len__(self):
        if self.drop_last:
            return (self.first_size // self.batch_size
                    + (self.data_len - self.first_size) // self.batch_size)
        else:
            return ((self.first_size + self.batch_size - 1) // self.batch_size
                    + (self.data_len - self.first_size + self.batch_size - 1)
                    // self.batch_size)
<|reserved_special_token_1|>
from torch.utils.data.sampler import Sampler
import torch
import random
class SwitchingBatchSampler(Sampler):
def __init__(self, data_source, batch_size, drop_last=False):
self.data_source = data_source
self.batch_size = batch_size
self.drop_last = drop_last
self.data_len = len(self.data_source)
count = 0
for i in range(self.data_len):
if self.data_source.imgs[i][1] == 1:
break
else:
count += 1
print('Total Images: %d [Class 0: %d, Class 1: %d]\n' % (self.
data_len, count, self.data_len - count))
self.first_size = count
if random.uniform(0, 1) > 0.5:
self.turn = 0
else:
self.turn = 1
def __iter__(self):
second_size = self.data_len - self.first_size
self.first_iter = iter(torch.randperm(self.first_size))
self.second_iter = iter(torch.randperm(second_size) + self.first_size)
i = 0
count_first = 0
count_second = 0
batch = []
while count_first + count_second < self.data_len:
if self.turn == 0:
if count_first == self.first_size:
self.turn = 1
if len(batch) > 0 and not self.drop_last:
yield batch
batch = []
else:
batch.append(next(self.first_iter))
count_first += 1
i += 1
elif count_second == self.data_len - self.first_size:
self.turn = 0
if len(batch) > 0 and not self.drop_last:
yield batch
batch = []
else:
batch.append(next(self.second_iter))
count_second += 1
i += 1
if i != 0 and i % self.batch_size == 0:
yield batch
batch = []
if (count_first != self.first_size and count_second !=
second_size and random.uniform(0, 1) > 0.5):
self.turn = (self.turn + 1) % 2
if len(batch) > 0 and not self.drop_last:
yield batch
def __len__(self):
        if self.drop_last:
            return (self.first_size // self.batch_size
                    + (self.data_len - self.first_size) // self.batch_size)
        else:
            return ((self.first_size + self.batch_size - 1) // self.batch_size
                    + (self.data_len - self.first_size + self.batch_size - 1)
                    // self.batch_size)
<|reserved_special_token_1|>
from torch.utils.data.sampler import Sampler
import torch
import random
class SwitchingBatchSampler(Sampler):
def __init__(self, data_source, batch_size, drop_last=False):
self.data_source = data_source
self.batch_size = batch_size
self.drop_last = drop_last
        # Divide the indices into two index groups (all class-0 images first, then class-1)
self.data_len = len(self.data_source)
count = 0
for i in range(self.data_len):
if self.data_source.imgs[i][1] == 1:
break
else:
count += 1
print("Total Images: %d [Class 0: %d, Class 1: %d]\n"%(self.data_len, count, (self.data_len-count)))
self.first_size = count
if random.uniform(0, 1) > 0.5:
self.turn = 0
else:
self.turn = 1
def __iter__(self):
# Initialize both iters
second_size = self.data_len - self.first_size
self.first_iter = iter(torch.randperm(self.first_size))
self.second_iter = iter(torch.randperm(second_size) + self.first_size)
# Counting variables
i = 0
        count_first = 0 # Counts how many imgs of the first iter have been returned
        count_second = 0 # Counts the second iter
batch = []
# Until no data left, keep iterating
while count_first+count_second < self.data_len:
# Fill the batch
if self.turn == 0:
if count_first == self.first_size:
self.turn = 1
if len(batch) > 0 and not self.drop_last:
yield batch
batch = []
else:
batch.append(next(self.first_iter))
count_first += 1
i += 1
else:
if count_second == (self.data_len-self.first_size):
self.turn = 0
if len(batch) > 0 and not self.drop_last:
yield batch
batch = []
else:
batch.append(next(self.second_iter))
count_second += 1
i += 1
# Yield the batch and switch the turn randomly
if i != 0 and i % self.batch_size == 0:
yield batch
batch = []
if count_first != self.first_size and count_second != second_size and random.uniform(0, 1) > 0.5:
self.turn = (self.turn + 1) % 2
# If drop_last is False, return the rest
if len(batch) > 0 and not self.drop_last:
yield batch
def __len__(self):
        if self.drop_last:
            # full batches from class 0 plus full batches from class 1
            return (self.first_size // self.batch_size
                    + (self.data_len - self.first_size) // self.batch_size)
        else:
            # round each class's batch count up so partial batches are included
            return ((self.first_size + self.batch_size - 1) // self.batch_size
                    + (self.data_len - self.first_size + self.batch_size - 1)
                    // self.batch_size)
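# Usage sketch (assumptions: an ImageFolder-style dataset exposing `.imgs`
# with all class-0 samples listed before class-1, as __init__ requires; the
# path and batch size below are placeholders):
# from torch.utils.data import DataLoader
# from torchvision import datasets
# dataset = datasets.ImageFolder('path/to/data')
# loader = DataLoader(dataset, batch_sampler=SwitchingBatchSampler(dataset, batch_size=32))
# for images, labels in loader:
#     pass  # each yielded batch contains samples of a single class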
|
flexible
|
{
"blob_id": "6b7bc40ba842ff565e7141fb1d51def99d9ab96a",
"index": 1124,
"step-1": "<mask token>\n\n\nclass SwitchingBatchSampler(Sampler):\n <mask token>\n\n def __iter__(self):\n second_size = self.data_len - self.first_size\n self.first_iter = iter(torch.randperm(self.first_size))\n self.second_iter = iter(torch.randperm(second_size) + self.first_size)\n i = 0\n count_first = 0\n count_second = 0\n batch = []\n while count_first + count_second < self.data_len:\n if self.turn == 0:\n if count_first == self.first_size:\n self.turn = 1\n if len(batch) > 0 and not self.drop_last:\n yield batch\n batch = []\n else:\n batch.append(next(self.first_iter))\n count_first += 1\n i += 1\n elif count_second == self.data_len - self.first_size:\n self.turn = 0\n if len(batch) > 0 and not self.drop_last:\n yield batch\n batch = []\n else:\n batch.append(next(self.second_iter))\n count_second += 1\n i += 1\n if i != 0 and i % self.batch_size == 0:\n yield batch\n batch = []\n if (count_first != self.first_size and count_second !=\n second_size and random.uniform(0, 1) > 0.5):\n self.turn = (self.turn + 1) % 2\n if len(batch) > 0 and not self.drop_last:\n yield batch\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass SwitchingBatchSampler(Sampler):\n <mask token>\n\n def __iter__(self):\n second_size = self.data_len - self.first_size\n self.first_iter = iter(torch.randperm(self.first_size))\n self.second_iter = iter(torch.randperm(second_size) + self.first_size)\n i = 0\n count_first = 0\n count_second = 0\n batch = []\n while count_first + count_second < self.data_len:\n if self.turn == 0:\n if count_first == self.first_size:\n self.turn = 1\n if len(batch) > 0 and not self.drop_last:\n yield batch\n batch = []\n else:\n batch.append(next(self.first_iter))\n count_first += 1\n i += 1\n elif count_second == self.data_len - self.first_size:\n self.turn = 0\n if len(batch) > 0 and not self.drop_last:\n yield batch\n batch = []\n else:\n batch.append(next(self.second_iter))\n count_second += 1\n i += 1\n if i != 0 and i % self.batch_size == 0:\n yield batch\n batch = []\n if (count_first != self.first_size and count_second !=\n second_size and random.uniform(0, 1) > 0.5):\n self.turn = (self.turn + 1) % 2\n if len(batch) > 0 and not self.drop_last:\n yield batch\n\n def __len__(self):\n if self.drop_last:\n return self.first_size // self.batch_size\n +((self.data_len - self.first_size) // self.batch_size)\n else:\n return (self.first_size + self.batch_size - 1) // self.batch_size\n +((self.data_len - self.first_size + self.batch_size - 1) //\n self.batch_size)\n",
"step-3": "<mask token>\n\n\nclass SwitchingBatchSampler(Sampler):\n\n def __init__(self, data_source, batch_size, drop_last=False):\n self.data_source = data_source\n self.batch_size = batch_size\n self.drop_last = drop_last\n self.data_len = len(self.data_source)\n count = 0\n for i in range(self.data_len):\n if self.data_source.imgs[i][1] == 1:\n break\n else:\n count += 1\n print('Total Images: %d [Class 0: %d, Class 1: %d]\\n' % (self.\n data_len, count, self.data_len - count))\n self.first_size = count\n if random.uniform(0, 1) > 0.5:\n self.turn = 0\n else:\n self.turn = 1\n\n def __iter__(self):\n second_size = self.data_len - self.first_size\n self.first_iter = iter(torch.randperm(self.first_size))\n self.second_iter = iter(torch.randperm(second_size) + self.first_size)\n i = 0\n count_first = 0\n count_second = 0\n batch = []\n while count_first + count_second < self.data_len:\n if self.turn == 0:\n if count_first == self.first_size:\n self.turn = 1\n if len(batch) > 0 and not self.drop_last:\n yield batch\n batch = []\n else:\n batch.append(next(self.first_iter))\n count_first += 1\n i += 1\n elif count_second == self.data_len - self.first_size:\n self.turn = 0\n if len(batch) > 0 and not self.drop_last:\n yield batch\n batch = []\n else:\n batch.append(next(self.second_iter))\n count_second += 1\n i += 1\n if i != 0 and i % self.batch_size == 0:\n yield batch\n batch = []\n if (count_first != self.first_size and count_second !=\n second_size and random.uniform(0, 1) > 0.5):\n self.turn = (self.turn + 1) % 2\n if len(batch) > 0 and not self.drop_last:\n yield batch\n\n def __len__(self):\n if self.drop_last:\n return self.first_size // self.batch_size\n +((self.data_len - self.first_size) // self.batch_size)\n else:\n return (self.first_size + self.batch_size - 1) // self.batch_size\n +((self.data_len - self.first_size + self.batch_size - 1) //\n self.batch_size)\n",
"step-4": "from torch.utils.data.sampler import Sampler\nimport torch\nimport random\n\n\nclass SwitchingBatchSampler(Sampler):\n\n def __init__(self, data_source, batch_size, drop_last=False):\n self.data_source = data_source\n self.batch_size = batch_size\n self.drop_last = drop_last\n self.data_len = len(self.data_source)\n count = 0\n for i in range(self.data_len):\n if self.data_source.imgs[i][1] == 1:\n break\n else:\n count += 1\n print('Total Images: %d [Class 0: %d, Class 1: %d]\\n' % (self.\n data_len, count, self.data_len - count))\n self.first_size = count\n if random.uniform(0, 1) > 0.5:\n self.turn = 0\n else:\n self.turn = 1\n\n def __iter__(self):\n second_size = self.data_len - self.first_size\n self.first_iter = iter(torch.randperm(self.first_size))\n self.second_iter = iter(torch.randperm(second_size) + self.first_size)\n i = 0\n count_first = 0\n count_second = 0\n batch = []\n while count_first + count_second < self.data_len:\n if self.turn == 0:\n if count_first == self.first_size:\n self.turn = 1\n if len(batch) > 0 and not self.drop_last:\n yield batch\n batch = []\n else:\n batch.append(next(self.first_iter))\n count_first += 1\n i += 1\n elif count_second == self.data_len - self.first_size:\n self.turn = 0\n if len(batch) > 0 and not self.drop_last:\n yield batch\n batch = []\n else:\n batch.append(next(self.second_iter))\n count_second += 1\n i += 1\n if i != 0 and i % self.batch_size == 0:\n yield batch\n batch = []\n if (count_first != self.first_size and count_second !=\n second_size and random.uniform(0, 1) > 0.5):\n self.turn = (self.turn + 1) % 2\n if len(batch) > 0 and not self.drop_last:\n yield batch\n\n def __len__(self):\n if self.drop_last:\n return self.first_size // self.batch_size\n +((self.data_len - self.first_size) // self.batch_size)\n else:\n return (self.first_size + self.batch_size - 1) // self.batch_size\n +((self.data_len - self.first_size + self.batch_size - 1) //\n self.batch_size)\n",
"step-5": "from torch.utils.data.sampler import Sampler\nimport torch\nimport random\n\nclass SwitchingBatchSampler(Sampler):\n\n\tdef __init__(self, data_source, batch_size, drop_last=False):\n\t\tself.data_source = data_source\n\t\tself.batch_size = batch_size\n\t\tself.drop_last = drop_last\n\n\t\t# Divide the indices into two indices groups\n\t\tself.data_len = len(self.data_source)\n\t\tcount = 0\n\t\tfor i in range(self.data_len):\n\t\t\tif self.data_source.imgs[i][1] == 1:\n\t\t\t\tbreak\n\t\t\telse:\n\t\t\t\tcount += 1\n\n\t\tprint(\"Total Images: %d [Class 0: %d, Class 1: %d]\\n\"%(self.data_len, count, (self.data_len-count)))\n\n\t\tself.first_size = count\n\n\t\tif random.uniform(0, 1) > 0.5:\n\t\t\tself.turn = 0\n\t\telse:\n\t\t\tself.turn = 1\n\n\n\tdef __iter__(self):\n\t\t# Initialize both iters\n\t\tsecond_size = self.data_len - self.first_size\n\t\tself.first_iter = iter(torch.randperm(self.first_size))\n\t\tself.second_iter = iter(torch.randperm(second_size) + self.first_size)\n\n\t\t# Counting variables\n\t\ti = 0\n\t\tcount_first = 0 # Counts how many imgs of first iter has been returned\n\t\tcount_second = 0 # Counts second iter\t\t\n\t\tbatch = []\n\n\t\t# Until no data left, keep iterating\n\t\twhile count_first+count_second < self.data_len:\n\t\t\t# Fill the batch\n\t\t\tif self.turn == 0:\n\t\t\t\tif count_first == self.first_size:\n\t\t\t\t\tself.turn = 1\n\t\t\t\t\tif len(batch) > 0 and not self.drop_last:\n\t\t\t\t\t\tyield batch\n\t\t\t\t\tbatch = [] \t\t\t\t\n\t\t\t\telse:\n\t\t\t\t\tbatch.append(next(self.first_iter))\n\t\t\t\t\tcount_first += 1\n\t\t\t\t\ti += 1\n\t\t\telse:\n\t\t\t\tif count_second == (self.data_len-self.first_size):\n\t\t\t\t\tself.turn = 0\n\t\t\t\t\tif len(batch) > 0 and not self.drop_last:\n\t\t\t\t\t\tyield batch\n\t\t\t\t\tbatch = [] \t\n\t\t\t\telse:\n\t\t\t\t\tbatch.append(next(self.second_iter))\n\t\t\t\t\tcount_second += 1\n\t\t\t\t\ti += 1\n\t\t\t# Yield the batch and switch the turn randomly\n\t\t\tif i != 0 and i % self.batch_size == 0:\n\t\t\t\tyield batch\n\t\t\t\tbatch = []\n\t\t\t\tif count_first != self.first_size and count_second != second_size and random.uniform(0, 1) > 0.5:\n\t\t\t\t\tself.turn = (self.turn + 1) % 2\n\n\t\t# If drop_last is False, return the rest\n\t\tif len(batch) > 0 and not self.drop_last:\n\t\t\tyield batch\n\n\n\tdef __len__(self):\n\t\tif self.drop_last:\n\t\t\treturn (self.first_size // self.batch_size)\n\t\t\t+ ((self.data_len - self.first_size) // self.batch_size)\n\t\telse:\n\t\t\treturn ((self.first_size + self.batch_size - 1) // self.batch_size)\n\t\t\t+ ((self.data_len - self.first_size + self.batch_size - 1) // self.batch_size)",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from django.urls import path
from .views import MainView
app_name = "bio"
# app_name will help us do a reverse look-up later.
urlpatterns = [
path('get_mtx_data', MainView.as_view()),
]
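For context, a sketch of what the imported `MainView` could look like (hypothetical -- the real view lives in `.views` and is not shown here); any class-based view satisfies this URLconf, since it only needs `as_view()`:

# views.py sketch -- the payload shape is an assumption, not the actual API.
from django.http import JsonResponse
from django.views import View

class MainView(View):
    def get(self, request):
        return JsonResponse({'mtx_data': []})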
|
normal
|
{
"blob_id": "e3a984294cad5830358df50fa00111017cbe226d",
"index": 3678,
"step-1": "<mask token>\n",
"step-2": "<mask token>\napp_name = 'bio'\nurlpatterns = [path('get_mtx_data', MainView.as_view())]\n",
"step-3": "from django.urls import path\nfrom .views import MainView\napp_name = 'bio'\nurlpatterns = [path('get_mtx_data', MainView.as_view())]\n",
"step-4": "from django.urls import path\n\nfrom .views import MainView\n\napp_name = \"bio\"\n# app_name will help us do a reverse look-up latter.\nurlpatterns = [\n path('get_mtx_data', MainView.as_view()),\n]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import sys
class Bus:
def __init__(self):
self.seats=0
self.dict_seats={}
self.num_passenger = 0
def conctructor(self,seats):
self.seats=seats
for i in range(1,self.seats+1):
self.dict_seats.update({i:"Free"})
return self.dict_seats
def getOn(self, passenger_name=None):
self.num_passenger += 1
if self.num_passenger > self.seats:
sys.exit(f'Sorry dear {passenger_name}. There is no free seat on the bus')
free_list = list(self.dict_seats.values())
free_num_seat = int(free_list.index("Free"))+1
self.dict_seats.update({free_num_seat : passenger_name})
    def getOn_2(self, *names):
        # Iterate the names tuple directly; stringifying it and splitting on
        # spaces would mangle the individual names.
        for name in names:
            self.num_passenger += 1
            if self.num_passenger > self.seats:
                sys.exit(f'Sorry dear {name}. There is no free seat on the bus')
            free_list = list(self.dict_seats.values())
            free_num_seat = free_list.index("Free") + 1
            self.dict_seats.update({free_num_seat: name})
    def getOf(self, passenger_name=None):
        close_list = list(self.dict_seats.values())
        if passenger_name in close_list:
            # Only count a passenger off if they were actually on the bus.
            self.num_passenger -= 1
            close_num_seat = close_list.index(passenger_name) + 1
            self.dict_seats.update({close_num_seat: "Free"})
        else:
            print(f'Passenger {passenger_name} is not on the bus')
def __str__(self):
return f'Number of seats on the bus - {self.seats}\nNumber of passenger - {self.num_passenger}'\
f'\nFree seats - {self.seats-self.num_passenger}'\
f'\nOther details - {self.dict_seats}'
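A short driver for the class above (seat count and passenger names are made up). Note that `conctructor()` -- kept with its original spelling -- must be called before boarding anyone, since it builds the seat map:

# Example session; all values are illustrative.
bus = Bus()
bus.conctructor(4)            # seats 1..4 start out "Free"
bus.getOn('Alice')
bus.getOn_2('Bob', 'Carol')
bus.getOf('Dave')             # prints: Passenger Dave is not on the bus
print(bus)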
|
normal
|
{
"blob_id": "1396509f65d194eeaefa3841e152b7078abf0032",
"index": 5549,
"step-1": "<mask token>\n\n\nclass Bus:\n <mask token>\n <mask token>\n <mask token>\n\n def getOn_2(self, *names):\n str_names = str(names)\n str_names.strip('')\n list_names = str_names.split(' ')\n for i in list_names:\n self.num_passenger += 1\n if self.num_passenger > self.seats:\n sys.exit(f'Sorry dear {i}. There is no free seat on the bus')\n free_list = list(self.dict_seats.values())\n free_num_seat = int(free_list.index('Free')) + 1\n self.dict_seats.update({free_num_seat: i})\n\n def getOf(self, passenger_name=None):\n self.num_passenger -= 1\n close_list = list(self.dict_seats.values())\n if passenger_name in close_list:\n close_num_seat = int(close_list.index(passenger_name) + 1)\n self.dict_seats.update({close_num_seat: 'Free'})\n else:\n print(f'Passenger {passenger_name} is not on the bus')\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Bus:\n\n def __init__(self):\n self.seats = 0\n self.dict_seats = {}\n self.num_passenger = 0\n\n def conctructor(self, seats):\n self.seats = seats\n for i in range(1, self.seats + 1):\n self.dict_seats.update({i: 'Free'})\n return self.dict_seats\n <mask token>\n\n def getOn_2(self, *names):\n str_names = str(names)\n str_names.strip('')\n list_names = str_names.split(' ')\n for i in list_names:\n self.num_passenger += 1\n if self.num_passenger > self.seats:\n sys.exit(f'Sorry dear {i}. There is no free seat on the bus')\n free_list = list(self.dict_seats.values())\n free_num_seat = int(free_list.index('Free')) + 1\n self.dict_seats.update({free_num_seat: i})\n\n def getOf(self, passenger_name=None):\n self.num_passenger -= 1\n close_list = list(self.dict_seats.values())\n if passenger_name in close_list:\n close_num_seat = int(close_list.index(passenger_name) + 1)\n self.dict_seats.update({close_num_seat: 'Free'})\n else:\n print(f'Passenger {passenger_name} is not on the bus')\n\n def __str__(self):\n return f\"\"\"Number of seats on the bus - {self.seats}\nNumber of passenger - {self.num_passenger}\nFree seats - {self.seats - self.num_passenger}\nOther details - {self.dict_seats}\"\"\"\n",
"step-3": "<mask token>\n\n\nclass Bus:\n\n def __init__(self):\n self.seats = 0\n self.dict_seats = {}\n self.num_passenger = 0\n\n def conctructor(self, seats):\n self.seats = seats\n for i in range(1, self.seats + 1):\n self.dict_seats.update({i: 'Free'})\n return self.dict_seats\n\n def getOn(self, passenger_name=None):\n self.num_passenger += 1\n if self.num_passenger > self.seats:\n sys.exit(\n f'Sorry dear {passenger_name}. There is no free seat on the bus'\n )\n free_list = list(self.dict_seats.values())\n free_num_seat = int(free_list.index('Free')) + 1\n self.dict_seats.update({free_num_seat: passenger_name})\n\n def getOn_2(self, *names):\n str_names = str(names)\n str_names.strip('')\n list_names = str_names.split(' ')\n for i in list_names:\n self.num_passenger += 1\n if self.num_passenger > self.seats:\n sys.exit(f'Sorry dear {i}. There is no free seat on the bus')\n free_list = list(self.dict_seats.values())\n free_num_seat = int(free_list.index('Free')) + 1\n self.dict_seats.update({free_num_seat: i})\n\n def getOf(self, passenger_name=None):\n self.num_passenger -= 1\n close_list = list(self.dict_seats.values())\n if passenger_name in close_list:\n close_num_seat = int(close_list.index(passenger_name) + 1)\n self.dict_seats.update({close_num_seat: 'Free'})\n else:\n print(f'Passenger {passenger_name} is not on the bus')\n\n def __str__(self):\n return f\"\"\"Number of seats on the bus - {self.seats}\nNumber of passenger - {self.num_passenger}\nFree seats - {self.seats - self.num_passenger}\nOther details - {self.dict_seats}\"\"\"\n",
"step-4": "import sys\n\n\nclass Bus:\n\n def __init__(self):\n self.seats = 0\n self.dict_seats = {}\n self.num_passenger = 0\n\n def conctructor(self, seats):\n self.seats = seats\n for i in range(1, self.seats + 1):\n self.dict_seats.update({i: 'Free'})\n return self.dict_seats\n\n def getOn(self, passenger_name=None):\n self.num_passenger += 1\n if self.num_passenger > self.seats:\n sys.exit(\n f'Sorry dear {passenger_name}. There is no free seat on the bus'\n )\n free_list = list(self.dict_seats.values())\n free_num_seat = int(free_list.index('Free')) + 1\n self.dict_seats.update({free_num_seat: passenger_name})\n\n def getOn_2(self, *names):\n str_names = str(names)\n str_names.strip('')\n list_names = str_names.split(' ')\n for i in list_names:\n self.num_passenger += 1\n if self.num_passenger > self.seats:\n sys.exit(f'Sorry dear {i}. There is no free seat on the bus')\n free_list = list(self.dict_seats.values())\n free_num_seat = int(free_list.index('Free')) + 1\n self.dict_seats.update({free_num_seat: i})\n\n def getOf(self, passenger_name=None):\n self.num_passenger -= 1\n close_list = list(self.dict_seats.values())\n if passenger_name in close_list:\n close_num_seat = int(close_list.index(passenger_name) + 1)\n self.dict_seats.update({close_num_seat: 'Free'})\n else:\n print(f'Passenger {passenger_name} is not on the bus')\n\n def __str__(self):\n return f\"\"\"Number of seats on the bus - {self.seats}\nNumber of passenger - {self.num_passenger}\nFree seats - {self.seats - self.num_passenger}\nOther details - {self.dict_seats}\"\"\"\n",
"step-5": "import sys\nclass Bus:\n def __init__(self):\n self.seats=0\n self.dict_seats={}\n self.num_passenger = 0\n\n def conctructor(self,seats):\n self.seats=seats\n for i in range(1,self.seats+1):\n self.dict_seats.update({i:\"Free\"})\n return self.dict_seats\n\n def getOn(self, passenger_name=None):\n self.num_passenger += 1\n if self.num_passenger > self.seats:\n sys.exit(f'Sorry dear {passenger_name}. There is no free seat on the bus')\n free_list = list(self.dict_seats.values())\n free_num_seat = int(free_list.index(\"Free\"))+1\n self.dict_seats.update({free_num_seat : passenger_name})\n\n def getOn_2(self,*names):\n str_names=str(names)\n str_names.strip(\"\")\n list_names=str_names.split(\" \")\n for i in list_names:\n self.num_passenger += 1\n if self.num_passenger > self.seats:\n sys.exit(f'Sorry dear {i}. There is no free seat on the bus')\n free_list=list(self.dict_seats.values())\n free_num_seat=int(free_list.index(\"Free\"))+1\n self.dict_seats.update({free_num_seat : i})\n\n\n\n def getOf(self,passenger_name=None):\n self.num_passenger -= 1\n close_list = list(self.dict_seats.values())\n if passenger_name in close_list:\n close_num_seat = int(close_list.index(passenger_name) + 1)\n self.dict_seats.update({close_num_seat: \"Free\"})\n else:\n print(f'Passenger {passenger_name} is not on the bus')\n def __str__(self):\n return f'Number of seats on the bus - {self.seats}\\nNumber of passenger - {self.num_passenger}'\\\n f'\\nFree seats - {self.seats-self.num_passenger}'\\\n f'\\nOther details - {self.dict_seats}'\n\n",
"step-ids": [
3,
6,
7,
8,
9
]
}
|
[
3,
6,
7,
8,
9
] |
# Generated by Django 3.2.7 on 2021-10-01 08:36
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('app', '0005_alter_users_is_active'),
]
operations = [
migrations.AlterModelManagers(
name='users',
managers=[
],
),
]
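For orientation, a hypothetical model change that would make `makemigrations` emit this `AlterModelManagers` operation (the real `Users` model is not shown): removing the last custom manager from the model leaves `managers=[]` in the recorded migration state.

# Sketch only -- field and manager names are assumptions.
from django.db import models

class Users(models.Model):
    # Previously something like:
    #     objects = CustomUserManager()   # with use_in_migrations = True
    # Deleting that declaration is what
    # AlterModelManagers(name='users', managers=[]) records.
    is_active = models.BooleanField(default=True)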
|
normal
|
{
"blob_id": "6670295241516664e30c7db5cd3b5e2fb6c4fb05",
"index": 1985,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('app', '0005_alter_users_is_active')]\n operations = [migrations.AlterModelManagers(name='users', managers=[])]\n",
"step-4": "from django.db import migrations\n\n\nclass Migration(migrations.Migration):\n dependencies = [('app', '0005_alter_users_is_active')]\n operations = [migrations.AlterModelManagers(name='users', managers=[])]\n",
"step-5": "# Generated by Django 3.2.7 on 2021-10-01 08:36\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('app', '0005_alter_users_is_active'),\n ]\n\n operations = [\n migrations.AlterModelManagers(\n name='users',\n managers=[\n ],\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class MyBot(BaseAgent):
<|reserved_special_token_0|>
def initialize_agent(self):
self.boost_pad_tracker.initialize_boosts(self.get_field_info())
self.info = MyInfo(self.team, self.index)
self.strat = Strategy(self.info)
self.car = Car()
def get_output(self, packet: GameTickPacket) ->SimpleControllerState:
"""
This function will be called by the framework many times per second. This is where you can
see the motion of the ball, etc. and return controls to drive your car.
"""
self.boost_pad_tracker.update_boost_status(packet)
self.car.updateCar(packet, self.index)
self.info.read_packet(packet, self.get_ball_prediction_struct().slices)
if self.action is None:
self.action = self.strat.chooseAction(self.info)
controls = self.action.tick(self.info)
print(controls.steer)
return controls
self.renderer.draw_string_3d(self.car.loc, 1, 1,
f'Speed: {self.car.vel.length():.1f}', self.renderer.white())
if self.action.name:
self.renderer.draw_string_3d(self.car.loc + Vec3(0, 0, 20), 1,
1, self.action.name, self.renderer.white())
if packet.game_info.is_kickoff_pause and not isinstance(self.action,
kickoff):
self.action = kickoff(self.car.loc)
controls = self.action.tick(self.info)
return controls
if self.action and not self.action.done:
controls = self.action.tick(self.info)
if controls is not None:
return controls
elif self.action.done:
print('choosing new action')
self.action = self.strat.chooseAction(self.info)
controls = self.action.tick(self.info)
return controls
ball_location = Vec3(packet.game_ball.physics.location)
if self.car.loc.dist(ball_location) > 1500:
ball_prediction = self.get_ball_prediction_struct()
ball_in_future = find_slice_at_time(ball_prediction, packet.
game_info.seconds_elapsed + 2)
target_location = Vec3(ball_in_future.physics.location)
self.renderer.draw_line_3d(ball_location, target_location, self
.renderer.cyan())
else:
target_location = ball_location
"""
if 750 < self.car.vel.length() < 800:
# We'll do a front flip if the car is moving at a certain speed.
return self.begin_front_flip(packet)
#controls = self.action.controls
controls = SimpleControllerState()
controls.steer = steer_toward_target(self.car, target_location)
controls.throttle = 1.0
# You can set more controls if you want, like controls.boost.
"""
print('the fuck we doin here?!?!?!?')
return controls
def begin_front_flip(self, packet):
self.send_quick_chat(team_only=False, quick_chat=QuickChatSelection
.Information_IGotIt)
self.action = Sequence([ControlStep(duration=0.05, controls=
SimpleControllerState(jump=True)), ControlStep(duration=0.05,
controls=SimpleControllerState(jump=False)), ControlStep(
duration=0.2, controls=SimpleControllerState(jump=True, pitch=-
1)), ControlStep(duration=0.8, controls=SimpleControllerState())])
return self.action.tick(packet)
def is_kickoff(self, ball_location, ball_velocity):
return ball_location.flat(
) == kickoff_location and ball_velocity.length() == 0
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class MyBot(BaseAgent):
def __init__(self, name, team, index):
super().__init__(name, team, index)
self.action: Action = kickoff
self.info: GameInfo = None
self.car: Car = None
self.boost_pad_tracker = BoostPadTracker()
self.stat: Strategy = None
self.action: Action = None
def initialize_agent(self):
self.boost_pad_tracker.initialize_boosts(self.get_field_info())
self.info = MyInfo(self.team, self.index)
self.strat = Strategy(self.info)
self.car = Car()
def get_output(self, packet: GameTickPacket) ->SimpleControllerState:
"""
This function will be called by the framework many times per second. This is where you can
see the motion of the ball, etc. and return controls to drive your car.
"""
self.boost_pad_tracker.update_boost_status(packet)
self.car.updateCar(packet, self.index)
self.info.read_packet(packet, self.get_ball_prediction_struct().slices)
if self.action is None:
self.action = self.strat.chooseAction(self.info)
controls = self.action.tick(self.info)
print(controls.steer)
return controls
self.renderer.draw_string_3d(self.car.loc, 1, 1,
f'Speed: {self.car.vel.length():.1f}', self.renderer.white())
if self.action.name:
self.renderer.draw_string_3d(self.car.loc + Vec3(0, 0, 20), 1,
1, self.action.name, self.renderer.white())
if packet.game_info.is_kickoff_pause and not isinstance(self.action,
kickoff):
self.action = kickoff(self.car.loc)
controls = self.action.tick(self.info)
return controls
if self.action and not self.action.done:
controls = self.action.tick(self.info)
if controls is not None:
return controls
elif self.action.done:
print('choosing new action')
self.action = self.strat.chooseAction(self.info)
controls = self.action.tick(self.info)
return controls
ball_location = Vec3(packet.game_ball.physics.location)
if self.car.loc.dist(ball_location) > 1500:
ball_prediction = self.get_ball_prediction_struct()
ball_in_future = find_slice_at_time(ball_prediction, packet.
game_info.seconds_elapsed + 2)
target_location = Vec3(ball_in_future.physics.location)
self.renderer.draw_line_3d(ball_location, target_location, self
.renderer.cyan())
else:
target_location = ball_location
"""
if 750 < self.car.vel.length() < 800:
# We'll do a front flip if the car is moving at a certain speed.
return self.begin_front_flip(packet)
#controls = self.action.controls
controls = SimpleControllerState()
controls.steer = steer_toward_target(self.car, target_location)
controls.throttle = 1.0
# You can set more controls if you want, like controls.boost.
"""
print('the fuck we doin here?!?!?!?')
return controls
def begin_front_flip(self, packet):
self.send_quick_chat(team_only=False, quick_chat=QuickChatSelection
.Information_IGotIt)
self.action = Sequence([ControlStep(duration=0.05, controls=
SimpleControllerState(jump=True)), ControlStep(duration=0.05,
controls=SimpleControllerState(jump=False)), ControlStep(
duration=0.2, controls=SimpleControllerState(jump=True, pitch=-
1)), ControlStep(duration=0.8, controls=SimpleControllerState())])
return self.action.tick(packet)
def is_kickoff(self, ball_location, ball_velocity):
return ball_location.flat(
) == kickoff_location and ball_velocity.length() == 0
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
kickoff_location = Vec3(0, 0, 0)
class MyBot(BaseAgent):
def __init__(self, name, team, index):
super().__init__(name, team, index)
self.action: Action = kickoff
self.info: GameInfo = None
self.car: Car = None
self.boost_pad_tracker = BoostPadTracker()
self.stat: Strategy = None
self.action: Action = None
def initialize_agent(self):
self.boost_pad_tracker.initialize_boosts(self.get_field_info())
self.info = MyInfo(self.team, self.index)
self.strat = Strategy(self.info)
self.car = Car()
def get_output(self, packet: GameTickPacket) ->SimpleControllerState:
"""
This function will be called by the framework many times per second. This is where you can
see the motion of the ball, etc. and return controls to drive your car.
"""
self.boost_pad_tracker.update_boost_status(packet)
self.car.updateCar(packet, self.index)
self.info.read_packet(packet, self.get_ball_prediction_struct().slices)
if self.action is None:
self.action = self.strat.chooseAction(self.info)
controls = self.action.tick(self.info)
print(controls.steer)
return controls
self.renderer.draw_string_3d(self.car.loc, 1, 1,
f'Speed: {self.car.vel.length():.1f}', self.renderer.white())
if self.action.name:
self.renderer.draw_string_3d(self.car.loc + Vec3(0, 0, 20), 1,
1, self.action.name, self.renderer.white())
if packet.game_info.is_kickoff_pause and not isinstance(self.action,
kickoff):
self.action = kickoff(self.car.loc)
controls = self.action.tick(self.info)
return controls
if self.action and not self.action.done:
controls = self.action.tick(self.info)
if controls is not None:
return controls
elif self.action.done:
print('choosing new action')
self.action = self.strat.chooseAction(self.info)
controls = self.action.tick(self.info)
return controls
ball_location = Vec3(packet.game_ball.physics.location)
if self.car.loc.dist(ball_location) > 1500:
ball_prediction = self.get_ball_prediction_struct()
ball_in_future = find_slice_at_time(ball_prediction, packet.
game_info.seconds_elapsed + 2)
target_location = Vec3(ball_in_future.physics.location)
self.renderer.draw_line_3d(ball_location, target_location, self
.renderer.cyan())
else:
target_location = ball_location
"""
if 750 < self.car.vel.length() < 800:
# We'll do a front flip if the car is moving at a certain speed.
return self.begin_front_flip(packet)
#controls = self.action.controls
controls = SimpleControllerState()
controls.steer = steer_toward_target(self.car, target_location)
controls.throttle = 1.0
# You can set more controls if you want, like controls.boost.
"""
print('the fuck we doin here?!?!?!?')
return controls
def begin_front_flip(self, packet):
self.send_quick_chat(team_only=False, quick_chat=QuickChatSelection
.Information_IGotIt)
self.action = Sequence([ControlStep(duration=0.05, controls=
SimpleControllerState(jump=True)), ControlStep(duration=0.05,
controls=SimpleControllerState(jump=False)), ControlStep(
duration=0.2, controls=SimpleControllerState(jump=True, pitch=-
1)), ControlStep(duration=0.8, controls=SimpleControllerState())])
return self.action.tick(packet)
def is_kickoff(self, ball_location, ball_velocity):
return ball_location.flat(
) == kickoff_location and ball_velocity.length() == 0
<|reserved_special_token_0|>
<|reserved_special_token_1|>
from rlbot.agents.base_agent import BaseAgent, GameTickPacket, SimpleControllerState
from Decisions.challengeGame import ChallengeGame
from Decisions.info import MyInfo, Car
from Decisions.strat import Strategy
from Drawing.Drawing import DrawingTool
from util.vec import Vec3
from Actions.Kickoff import kickoff
from Actions.Chase import chase
from rlbot.messages.flat.QuickChatSelection import QuickChatSelection
from rlbot.utils.structures.game_data_struct import GameTickPacket
from util.ball_prediction_analysis import find_slice_at_time
from util.boost_pad_tracker import BoostPadTracker
from util.drive import steer_toward_target
from util.sequence import Sequence, ControlStep
from util.vec import Vec3
import math
import time
from math import radians
kickoff_location = Vec3(0, 0, 0)
class MyBot(BaseAgent):
def __init__(self, name, team, index):
super().__init__(name, team, index)
self.action: Action = kickoff
self.info: GameInfo = None
self.car: Car = None
self.boost_pad_tracker = BoostPadTracker()
self.stat: Strategy = None
self.action: Action = None
def initialize_agent(self):
self.boost_pad_tracker.initialize_boosts(self.get_field_info())
self.info = MyInfo(self.team, self.index)
self.strat = Strategy(self.info)
self.car = Car()
def get_output(self, packet: GameTickPacket) ->SimpleControllerState:
"""
This function will be called by the framework many times per second. This is where you can
see the motion of the ball, etc. and return controls to drive your car.
"""
self.boost_pad_tracker.update_boost_status(packet)
self.car.updateCar(packet, self.index)
self.info.read_packet(packet, self.get_ball_prediction_struct().slices)
if self.action is None:
self.action = self.strat.chooseAction(self.info)
controls = self.action.tick(self.info)
print(controls.steer)
return controls
self.renderer.draw_string_3d(self.car.loc, 1, 1,
f'Speed: {self.car.vel.length():.1f}', self.renderer.white())
if self.action.name:
self.renderer.draw_string_3d(self.car.loc + Vec3(0, 0, 20), 1,
1, self.action.name, self.renderer.white())
if packet.game_info.is_kickoff_pause and not isinstance(self.action,
kickoff):
self.action = kickoff(self.car.loc)
controls = self.action.tick(self.info)
return controls
if self.action and not self.action.done:
controls = self.action.tick(self.info)
if controls is not None:
return controls
elif self.action.done:
print('choosing new action')
self.action = self.strat.chooseAction(self.info)
controls = self.action.tick(self.info)
return controls
ball_location = Vec3(packet.game_ball.physics.location)
if self.car.loc.dist(ball_location) > 1500:
ball_prediction = self.get_ball_prediction_struct()
ball_in_future = find_slice_at_time(ball_prediction, packet.
game_info.seconds_elapsed + 2)
target_location = Vec3(ball_in_future.physics.location)
self.renderer.draw_line_3d(ball_location, target_location, self
.renderer.cyan())
else:
target_location = ball_location
"""
if 750 < self.car.vel.length() < 800:
# We'll do a front flip if the car is moving at a certain speed.
return self.begin_front_flip(packet)
#controls = self.action.controls
controls = SimpleControllerState()
controls.steer = steer_toward_target(self.car, target_location)
controls.throttle = 1.0
# You can set more controls if you want, like controls.boost.
"""
print('the fuck we doin here?!?!?!?')
return controls
def begin_front_flip(self, packet):
self.send_quick_chat(team_only=False, quick_chat=QuickChatSelection
.Information_IGotIt)
self.action = Sequence([ControlStep(duration=0.05, controls=
SimpleControllerState(jump=True)), ControlStep(duration=0.05,
controls=SimpleControllerState(jump=False)), ControlStep(
duration=0.2, controls=SimpleControllerState(jump=True, pitch=-
1)), ControlStep(duration=0.8, controls=SimpleControllerState())])
return self.action.tick(packet)
def is_kickoff(self, ball_location, ball_velocity):
return ball_location.flat(
) == kickoff_location and ball_velocity.length() == 0
<|reserved_special_token_0|>
<|reserved_special_token_1|>
from rlbot.agents.base_agent import BaseAgent, GameTickPacket, SimpleControllerState
#from rlbot.utils.structures.game_data_struct import GameTickPacket
from Decisions.challengeGame import ChallengeGame
from Decisions.info import MyInfo, Car
from Decisions.strat import Strategy
from Drawing.Drawing import DrawingTool
from util.vec import Vec3
from Actions.Kickoff import kickoff
from Actions.Chase import chase
# Blue team's (0) goal is located at (0, -5120)
# Orange (1) at (0, 5120)
# ball R = 92
from rlbot.messages.flat.QuickChatSelection import QuickChatSelection
from rlbot.utils.structures.game_data_struct import GameTickPacket
from util.ball_prediction_analysis import find_slice_at_time
from util.boost_pad_tracker import BoostPadTracker
from util.drive import steer_toward_target
from util.sequence import Sequence, ControlStep
from util.vec import Vec3
import math
import time
from math import radians
kickoff_location = Vec3(0, 0, 0)
class MyBot(BaseAgent):
def __init__(self, name, team, index):
super().__init__(name, team, index)
self.action: Action = kickoff
self.info : GameInfo = None
self.car : Car = None
self.boost_pad_tracker = BoostPadTracker()
self.stat : Strategy = None
self.action : Action = None
def initialize_agent(self):
# Set up information about the boost pads now that the game is active and the info is available
self.boost_pad_tracker.initialize_boosts(self.get_field_info())
self.info = MyInfo(self.team, self.index)
self.strat = Strategy(self.info)
self.car = Car()
def get_output(self, packet: GameTickPacket) -> SimpleControllerState:
"""
This function will be called by the framework many times per second. This is where you can
see the motion of the ball, etc. and return controls to drive your car.
"""
# Keep our boost pad info updated with which pads are currently active
self.boost_pad_tracker.update_boost_status(packet)
#self.info = self.info.read_packet(packet)
self.car.updateCar(packet, self.index)
self.info.read_packet(packet, self.get_ball_prediction_struct().slices)
#print("in main target: {}".format(self.get_ball_prediction_struct().slices[0].physics.location))
#self.renderer.draw_line_3d(self.car.loc, target_location, self.renderer.white())
#self.renderer.draw_rect_3d(target_location, 8, 8, True, self.renderer.cyan(), centered=True)
#cg = ChallengeGame(self.car, bp_struct)
#print(cg.get_time_to_loc(cg.challenge_loc))
# This is good to keep at the beginning of get_output. It will allow you to continue
# any sequences that you may have started during a previous call to get_output.
if self.action is None:
self.action = self.strat.chooseAction(self.info)
controls = self.action.tick(self.info)
print(controls.steer)
return controls
self.renderer.draw_string_3d(self.car.loc, 1, 1, f'Speed: {self.car.vel.length():.1f}', self.renderer.white())
if self.action.name:
self.renderer.draw_string_3d(self.car.loc + Vec3(0, 0, 20), 1, 1, self.action.name, self.renderer.white())
if packet.game_info.is_kickoff_pause and not isinstance(self.action, kickoff):
#self.logger.info(self.action)
self.action = kickoff(self.car.loc)
#print("Sequence is: {}".format(self.action))
#print("Sequence finished: {}".format(self.action.done))
controls = self.action.tick(self.info)
return controls
if self.action and not self.action.done:
controls = self.action.tick(self.info)
#print("action is: {}".format(self.action.name))
if controls is not None:
return controls
elif self.action.done:
print("choosing new action")
self.action = self.strat.chooseAction(self.info)
controls = self.action.tick(self.info)
return controls
# Gather some information about our car and the ball
ball_location = Vec3(packet.game_ball.physics.location)
if self.car.loc.dist(ball_location) > 1500:
# We're far away from the ball, let's try to lead it a little bit
ball_prediction = self.get_ball_prediction_struct() # This can predict bounces, etc
ball_in_future = find_slice_at_time(ball_prediction, packet.game_info.seconds_elapsed + 2)
target_location = Vec3(ball_in_future.physics.location)
self.renderer.draw_line_3d(ball_location, target_location, self.renderer.cyan())
else:
target_location = ball_location
# Draw some things to help understand what the bot is thinking
#self.renderer.draw_string_2d(100, 100, 1, 1, f'Ball at: {ball_location}', self.renderer.white())
'''
if 750 < self.car.vel.length() < 800:
# We'll do a front flip if the car is moving at a certain speed.
return self.begin_front_flip(packet)
#controls = self.action.controls
controls = SimpleControllerState()
controls.steer = steer_toward_target(self.car, target_location)
controls.throttle = 1.0
# You can set more controls if you want, like controls.boost.
'''
print("the fuck we doin here?!?!?!?")
return controls
def begin_front_flip(self, packet):
# Send some quickchat just for fun
self.send_quick_chat(team_only=False, quick_chat=QuickChatSelection.Information_IGotIt)
# Do a front flip. We will be committed to this for a few seconds and the bot will ignore other
# logic during that time because we are setting the action.
self.action = Sequence([
ControlStep(duration=0.05, controls=SimpleControllerState(jump=True)),
ControlStep(duration=0.05, controls=SimpleControllerState(jump=False)),
ControlStep(duration=0.2, controls=SimpleControllerState(jump=True, pitch=-1)),
ControlStep(duration=0.8, controls=SimpleControllerState()),
])
# Return the controls associated with the beginning of the sequence so we can start right away.
return self.action.tick(packet)
def is_kickoff(self, ball_location, ball_velocity):
#self.logger.info(ball_location.flat() == kickoff_location)
#self.logger.info(ball_velocity.length() == 0)
return ball_location.flat() == kickoff_location and ball_velocity.length() == 0
'''
class Bot(BaseAgent):
DEVMODE = True
def __init__(self, name, team, index):
super().__init__(name, team, index)
self.info: GameInfo = None
self.draw: DrawingTool = None
self.strat: Strategy = None
self.car = None
self.Actions: Maneuver = None
self.controls: SimpleControllerState = SimpleControllerState()
def initialize_agent(self):
#self.logger.info(rlutilities.__file__)
self.info = GameInfo(self.team)
#for field in self.info._fields_:
# print(field[0], getattr(self.info, field[0]))
self.info.set_mode("soccar")
self.draw = DrawingTool(self.renderer)
self.car = self.info.cars[self.index]
self.logger.info("my index is {}".format(self.index))
self.strat = Strategy(self.info, my_car)
def get_output(self, packet: GameTickPacket):
# Update game data variables
if self.tick_counter < 20:
self.tick_counter += 1
return Input()
if self.Actions is None and not self.Actions.finished:
controls = self.Action.tick(packet)
self.info.read_packet(packet, self.get_field_info(), self.get_ball_path())
self.draw.draw_path(self.get_ball_path())
challenge = ChallengeGame(self.info.cars[self.index], self.info.ball_path)
if challenge.should_go:
self.Action = self.strat.chooseAction(challenge, self.info.ball_path)
self.controls = self.Action.controls
print(self.Action)
if self.info.is_kickoff():
return self.do
self.controls = self.action.doThing(self.info)
if self.DEVMODE:
self.Action.render(self.draw)
challenge.render(self.draw)
return self.controls
def get_ball_path(self):
ball_prediction = self.get_ball_prediction_struct()
path = []
for i in range(0, ball_prediction.num_slices):
prediction_slice = ball_prediction.slices[i]
loc = prediction_slice.physics.location
path.append(loc)
return path
'''
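As an aside, the `Sequence`/`ControlStep` pattern used in `begin_front_flip` generalizes to any scripted maneuver; a hypothetical double jump built from the same primitives (the durations are guesses, not tuned values):

# Illustrative maneuver using only APIs already imported above.
def begin_double_jump():
    return Sequence([
        ControlStep(duration=0.05, controls=SimpleControllerState(jump=True)),
        ControlStep(duration=0.10, controls=SimpleControllerState(jump=False)),
        ControlStep(duration=0.05, controls=SimpleControllerState(jump=True)),
        ControlStep(duration=0.50, controls=SimpleControllerState()),
    ])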
|
flexible
|
{
"blob_id": "1a0d4e77f09b4ce752631ae36a83ff57f96b89b1",
"index": 600,
"step-1": "<mask token>\n\n\nclass MyBot(BaseAgent):\n <mask token>\n\n def initialize_agent(self):\n self.boost_pad_tracker.initialize_boosts(self.get_field_info())\n self.info = MyInfo(self.team, self.index)\n self.strat = Strategy(self.info)\n self.car = Car()\n\n def get_output(self, packet: GameTickPacket) ->SimpleControllerState:\n \"\"\"\n This function will be called by the framework many times per second. This is where you can\n see the motion of the ball, etc. and return controls to drive your car.\n \"\"\"\n self.boost_pad_tracker.update_boost_status(packet)\n self.car.updateCar(packet, self.index)\n self.info.read_packet(packet, self.get_ball_prediction_struct().slices)\n if self.action is None:\n self.action = self.strat.chooseAction(self.info)\n controls = self.action.tick(self.info)\n print(controls.steer)\n return controls\n self.renderer.draw_string_3d(self.car.loc, 1, 1,\n f'Speed: {self.car.vel.length():.1f}', self.renderer.white())\n if self.action.name:\n self.renderer.draw_string_3d(self.car.loc + Vec3(0, 0, 20), 1, \n 1, self.action.name, self.renderer.white())\n if packet.game_info.is_kickoff_pause and not isinstance(self.action,\n kickoff):\n self.action = kickoff(self.car.loc)\n controls = self.action.tick(self.info)\n return controls\n if self.action and not self.action.done:\n controls = self.action.tick(self.info)\n if controls is not None:\n return controls\n elif self.action.done:\n print('choosing new action')\n self.action = self.strat.chooseAction(self.info)\n controls = self.action.tick(self.info)\n return controls\n ball_location = Vec3(packet.game_ball.physics.location)\n if self.car.loc.dist(ball_location) > 1500:\n ball_prediction = self.get_ball_prediction_struct()\n ball_in_future = find_slice_at_time(ball_prediction, packet.\n game_info.seconds_elapsed + 2)\n target_location = Vec3(ball_in_future.physics.location)\n self.renderer.draw_line_3d(ball_location, target_location, self\n .renderer.cyan())\n else:\n target_location = ball_location\n \"\"\"\n if 750 < self.car.vel.length() < 800:\n # We'll do a front flip if the car is moving at a certain speed.\n return self.begin_front_flip(packet)\n \n #controls = self.action.controls\n controls = SimpleControllerState()\n controls.steer = steer_toward_target(self.car, target_location)\n controls.throttle = 1.0\n # You can set more controls if you want, like controls.boost.\n \"\"\"\n print('the fuck we doin here?!?!?!?')\n return controls\n\n def begin_front_flip(self, packet):\n self.send_quick_chat(team_only=False, quick_chat=QuickChatSelection\n .Information_IGotIt)\n self.action = Sequence([ControlStep(duration=0.05, controls=\n SimpleControllerState(jump=True)), ControlStep(duration=0.05,\n controls=SimpleControllerState(jump=False)), ControlStep(\n duration=0.2, controls=SimpleControllerState(jump=True, pitch=-\n 1)), ControlStep(duration=0.8, controls=SimpleControllerState())])\n return self.action.tick(packet)\n\n def is_kickoff(self, ball_location, ball_velocity):\n return ball_location.flat(\n ) == kickoff_location and ball_velocity.length() == 0\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass MyBot(BaseAgent):\n\n def __init__(self, name, team, index):\n super().__init__(name, team, index)\n self.action: Action = kickoff\n self.info: GameInfo = None\n self.car: Car = None\n self.boost_pad_tracker = BoostPadTracker()\n self.stat: Strategy = None\n self.action: Action = None\n\n def initialize_agent(self):\n self.boost_pad_tracker.initialize_boosts(self.get_field_info())\n self.info = MyInfo(self.team, self.index)\n self.strat = Strategy(self.info)\n self.car = Car()\n\n def get_output(self, packet: GameTickPacket) ->SimpleControllerState:\n \"\"\"\n This function will be called by the framework many times per second. This is where you can\n see the motion of the ball, etc. and return controls to drive your car.\n \"\"\"\n self.boost_pad_tracker.update_boost_status(packet)\n self.car.updateCar(packet, self.index)\n self.info.read_packet(packet, self.get_ball_prediction_struct().slices)\n if self.action is None:\n self.action = self.strat.chooseAction(self.info)\n controls = self.action.tick(self.info)\n print(controls.steer)\n return controls\n self.renderer.draw_string_3d(self.car.loc, 1, 1,\n f'Speed: {self.car.vel.length():.1f}', self.renderer.white())\n if self.action.name:\n self.renderer.draw_string_3d(self.car.loc + Vec3(0, 0, 20), 1, \n 1, self.action.name, self.renderer.white())\n if packet.game_info.is_kickoff_pause and not isinstance(self.action,\n kickoff):\n self.action = kickoff(self.car.loc)\n controls = self.action.tick(self.info)\n return controls\n if self.action and not self.action.done:\n controls = self.action.tick(self.info)\n if controls is not None:\n return controls\n elif self.action.done:\n print('choosing new action')\n self.action = self.strat.chooseAction(self.info)\n controls = self.action.tick(self.info)\n return controls\n ball_location = Vec3(packet.game_ball.physics.location)\n if self.car.loc.dist(ball_location) > 1500:\n ball_prediction = self.get_ball_prediction_struct()\n ball_in_future = find_slice_at_time(ball_prediction, packet.\n game_info.seconds_elapsed + 2)\n target_location = Vec3(ball_in_future.physics.location)\n self.renderer.draw_line_3d(ball_location, target_location, self\n .renderer.cyan())\n else:\n target_location = ball_location\n \"\"\"\n if 750 < self.car.vel.length() < 800:\n # We'll do a front flip if the car is moving at a certain speed.\n return self.begin_front_flip(packet)\n \n #controls = self.action.controls\n controls = SimpleControllerState()\n controls.steer = steer_toward_target(self.car, target_location)\n controls.throttle = 1.0\n # You can set more controls if you want, like controls.boost.\n \"\"\"\n print('the fuck we doin here?!?!?!?')\n return controls\n\n def begin_front_flip(self, packet):\n self.send_quick_chat(team_only=False, quick_chat=QuickChatSelection\n .Information_IGotIt)\n self.action = Sequence([ControlStep(duration=0.05, controls=\n SimpleControllerState(jump=True)), ControlStep(duration=0.05,\n controls=SimpleControllerState(jump=False)), ControlStep(\n duration=0.2, controls=SimpleControllerState(jump=True, pitch=-\n 1)), ControlStep(duration=0.8, controls=SimpleControllerState())])\n return self.action.tick(packet)\n\n def is_kickoff(self, ball_location, ball_velocity):\n return ball_location.flat(\n ) == kickoff_location and ball_velocity.length() == 0\n\n\n<mask token>\n",
"step-3": "<mask token>\nkickoff_location = Vec3(0, 0, 0)\n\n\nclass MyBot(BaseAgent):\n\n def __init__(self, name, team, index):\n super().__init__(name, team, index)\n self.action: Action = kickoff\n self.info: GameInfo = None\n self.car: Car = None\n self.boost_pad_tracker = BoostPadTracker()\n self.stat: Strategy = None\n self.action: Action = None\n\n def initialize_agent(self):\n self.boost_pad_tracker.initialize_boosts(self.get_field_info())\n self.info = MyInfo(self.team, self.index)\n self.strat = Strategy(self.info)\n self.car = Car()\n\n def get_output(self, packet: GameTickPacket) ->SimpleControllerState:\n \"\"\"\n This function will be called by the framework many times per second. This is where you can\n see the motion of the ball, etc. and return controls to drive your car.\n \"\"\"\n self.boost_pad_tracker.update_boost_status(packet)\n self.car.updateCar(packet, self.index)\n self.info.read_packet(packet, self.get_ball_prediction_struct().slices)\n if self.action is None:\n self.action = self.strat.chooseAction(self.info)\n controls = self.action.tick(self.info)\n print(controls.steer)\n return controls\n self.renderer.draw_string_3d(self.car.loc, 1, 1,\n f'Speed: {self.car.vel.length():.1f}', self.renderer.white())\n if self.action.name:\n self.renderer.draw_string_3d(self.car.loc + Vec3(0, 0, 20), 1, \n 1, self.action.name, self.renderer.white())\n if packet.game_info.is_kickoff_pause and not isinstance(self.action,\n kickoff):\n self.action = kickoff(self.car.loc)\n controls = self.action.tick(self.info)\n return controls\n if self.action and not self.action.done:\n controls = self.action.tick(self.info)\n if controls is not None:\n return controls\n elif self.action.done:\n print('choosing new action')\n self.action = self.strat.chooseAction(self.info)\n controls = self.action.tick(self.info)\n return controls\n ball_location = Vec3(packet.game_ball.physics.location)\n if self.car.loc.dist(ball_location) > 1500:\n ball_prediction = self.get_ball_prediction_struct()\n ball_in_future = find_slice_at_time(ball_prediction, packet.\n game_info.seconds_elapsed + 2)\n target_location = Vec3(ball_in_future.physics.location)\n self.renderer.draw_line_3d(ball_location, target_location, self\n .renderer.cyan())\n else:\n target_location = ball_location\n \"\"\"\n if 750 < self.car.vel.length() < 800:\n # We'll do a front flip if the car is moving at a certain speed.\n return self.begin_front_flip(packet)\n \n #controls = self.action.controls\n controls = SimpleControllerState()\n controls.steer = steer_toward_target(self.car, target_location)\n controls.throttle = 1.0\n # You can set more controls if you want, like controls.boost.\n \"\"\"\n print('the fuck we doin here?!?!?!?')\n return controls\n\n def begin_front_flip(self, packet):\n self.send_quick_chat(team_only=False, quick_chat=QuickChatSelection\n .Information_IGotIt)\n self.action = Sequence([ControlStep(duration=0.05, controls=\n SimpleControllerState(jump=True)), ControlStep(duration=0.05,\n controls=SimpleControllerState(jump=False)), ControlStep(\n duration=0.2, controls=SimpleControllerState(jump=True, pitch=-\n 1)), ControlStep(duration=0.8, controls=SimpleControllerState())])\n return self.action.tick(packet)\n\n def is_kickoff(self, ball_location, ball_velocity):\n return ball_location.flat(\n ) == kickoff_location and ball_velocity.length() == 0\n\n\n<mask token>\n",
"step-4": "from rlbot.agents.base_agent import BaseAgent, GameTickPacket, SimpleControllerState\nfrom Decisions.challengeGame import ChallengeGame\nfrom Decisions.info import MyInfo, Car\nfrom Decisions.strat import Strategy\nfrom Drawing.Drawing import DrawingTool\nfrom util.vec import Vec3\nfrom Actions.Kickoff import kickoff\nfrom Actions.Chase import chase\nfrom rlbot.messages.flat.QuickChatSelection import QuickChatSelection\nfrom rlbot.utils.structures.game_data_struct import GameTickPacket\nfrom util.ball_prediction_analysis import find_slice_at_time\nfrom util.boost_pad_tracker import BoostPadTracker\nfrom util.drive import steer_toward_target\nfrom util.sequence import Sequence, ControlStep\nfrom util.vec import Vec3\nimport math\nimport time\nfrom math import radians\nkickoff_location = Vec3(0, 0, 0)\n\n\nclass MyBot(BaseAgent):\n\n def __init__(self, name, team, index):\n super().__init__(name, team, index)\n self.action: Action = kickoff\n self.info: GameInfo = None\n self.car: Car = None\n self.boost_pad_tracker = BoostPadTracker()\n self.stat: Strategy = None\n self.action: Action = None\n\n def initialize_agent(self):\n self.boost_pad_tracker.initialize_boosts(self.get_field_info())\n self.info = MyInfo(self.team, self.index)\n self.strat = Strategy(self.info)\n self.car = Car()\n\n def get_output(self, packet: GameTickPacket) ->SimpleControllerState:\n \"\"\"\n This function will be called by the framework many times per second. This is where you can\n see the motion of the ball, etc. and return controls to drive your car.\n \"\"\"\n self.boost_pad_tracker.update_boost_status(packet)\n self.car.updateCar(packet, self.index)\n self.info.read_packet(packet, self.get_ball_prediction_struct().slices)\n if self.action is None:\n self.action = self.strat.chooseAction(self.info)\n controls = self.action.tick(self.info)\n print(controls.steer)\n return controls\n self.renderer.draw_string_3d(self.car.loc, 1, 1,\n f'Speed: {self.car.vel.length():.1f}', self.renderer.white())\n if self.action.name:\n self.renderer.draw_string_3d(self.car.loc + Vec3(0, 0, 20), 1, \n 1, self.action.name, self.renderer.white())\n if packet.game_info.is_kickoff_pause and not isinstance(self.action,\n kickoff):\n self.action = kickoff(self.car.loc)\n controls = self.action.tick(self.info)\n return controls\n if self.action and not self.action.done:\n controls = self.action.tick(self.info)\n if controls is not None:\n return controls\n elif self.action.done:\n print('choosing new action')\n self.action = self.strat.chooseAction(self.info)\n controls = self.action.tick(self.info)\n return controls\n ball_location = Vec3(packet.game_ball.physics.location)\n if self.car.loc.dist(ball_location) > 1500:\n ball_prediction = self.get_ball_prediction_struct()\n ball_in_future = find_slice_at_time(ball_prediction, packet.\n game_info.seconds_elapsed + 2)\n target_location = Vec3(ball_in_future.physics.location)\n self.renderer.draw_line_3d(ball_location, target_location, self\n .renderer.cyan())\n else:\n target_location = ball_location\n \"\"\"\n if 750 < self.car.vel.length() < 800:\n # We'll do a front flip if the car is moving at a certain speed.\n return self.begin_front_flip(packet)\n \n #controls = self.action.controls\n controls = SimpleControllerState()\n controls.steer = steer_toward_target(self.car, target_location)\n controls.throttle = 1.0\n # You can set more controls if you want, like controls.boost.\n \"\"\"\n print('the fuck we doin here?!?!?!?')\n return controls\n\n def begin_front_flip(self, 
packet):\n self.send_quick_chat(team_only=False, quick_chat=QuickChatSelection\n .Information_IGotIt)\n self.action = Sequence([ControlStep(duration=0.05, controls=\n SimpleControllerState(jump=True)), ControlStep(duration=0.05,\n controls=SimpleControllerState(jump=False)), ControlStep(\n duration=0.2, controls=SimpleControllerState(jump=True, pitch=-\n 1)), ControlStep(duration=0.8, controls=SimpleControllerState())])\n return self.action.tick(packet)\n\n def is_kickoff(self, ball_location, ball_velocity):\n return ball_location.flat(\n ) == kickoff_location and ball_velocity.length() == 0\n\n\n<mask token>\n",
"step-5": "from rlbot.agents.base_agent import BaseAgent, GameTickPacket, SimpleControllerState\n#from rlbot.utils.structures.game_data_struct import GameTickPacket\nfrom Decisions.challengeGame import ChallengeGame\nfrom Decisions.info import MyInfo, Car\nfrom Decisions.strat import Strategy\nfrom Drawing.Drawing import DrawingTool\nfrom util.vec import Vec3\nfrom Actions.Kickoff import kickoff\nfrom Actions.Chase import chase\n# Blue team's (0) goal is located at (0, -5120) \n# Orange (1) at (0, 5120)\n# ball R = 92\n\nfrom rlbot.messages.flat.QuickChatSelection import QuickChatSelection\nfrom rlbot.utils.structures.game_data_struct import GameTickPacket\n\nfrom util.ball_prediction_analysis import find_slice_at_time\nfrom util.boost_pad_tracker import BoostPadTracker\nfrom util.drive import steer_toward_target\nfrom util.sequence import Sequence, ControlStep\nfrom util.vec import Vec3\n\n\nimport math\nimport time\nfrom math import radians\n\nkickoff_location = Vec3(0, 0, 0)\n\nclass MyBot(BaseAgent):\n\n\n def __init__(self, name, team, index):\n super().__init__(name, team, index) \n self.action: Action = kickoff\n self.info : GameInfo = None\n self.car : Car = None\n self.boost_pad_tracker = BoostPadTracker()\n self.stat : Strategy = None\n self.action : Action = None\n\n def initialize_agent(self):\n # Set up information about the boost pads now that the game is active and the info is available\n self.boost_pad_tracker.initialize_boosts(self.get_field_info())\n self.info = MyInfo(self.team, self.index)\n self.strat = Strategy(self.info)\n self.car = Car()\n\n def get_output(self, packet: GameTickPacket) -> SimpleControllerState:\n \"\"\"\n This function will be called by the framework many times per second. This is where you can\n see the motion of the ball, etc. and return controls to drive your car.\n \"\"\"\n\n # Keep our boost pad info updated with which pads are currently active\n self.boost_pad_tracker.update_boost_status(packet)\n #self.info = self.info.read_packet(packet) \n self.car.updateCar(packet, self.index)\n self.info.read_packet(packet, self.get_ball_prediction_struct().slices)\n #print(\"in main target: {}\".format(self.get_ball_prediction_struct().slices[0].physics.location))\n #self.renderer.draw_line_3d(self.car.loc, target_location, self.renderer.white())\n #self.renderer.draw_rect_3d(target_location, 8, 8, True, self.renderer.cyan(), centered=True)\n \n #cg = ChallengeGame(self.car, bp_struct)\n \n #print(cg.get_time_to_loc(cg.challenge_loc))\n # This is good to keep at the beginning of get_output. 
It will allow you to continue\n # any sequences that you may have started during a previous call to get_output.\n if self.action is None:\n self.action = self.strat.chooseAction(self.info)\n \n controls = self.action.tick(self.info)\n print(controls.steer)\n return controls\n \n\n self.renderer.draw_string_3d(self.car.loc, 1, 1, f'Speed: {self.car.vel.length():.1f}', self.renderer.white())\n if self.action.name:\n self.renderer.draw_string_3d(self.car.loc + Vec3(0, 0, 20), 1, 1, self.action.name, self.renderer.white())\n \n\n if packet.game_info.is_kickoff_pause and not isinstance(self.action, kickoff):\n #self.logger.info(self.action)\n self.action = kickoff(self.car.loc)\n #print(\"Sequence is: {}\".format(self.action))\n #print(\"Sequence finished: {}\".format(self.action.done))\n controls = self.action.tick(self.info)\n return controls\n \n if self.action and not self.action.done:\n controls = self.action.tick(self.info)\n #print(\"action is: {}\".format(self.action.name))\n if controls is not None:\n return controls\n \n elif self.action.done:\n print(\"choosing new action\")\n \n self.action = self.strat.chooseAction(self.info)\n controls = self.action.tick(self.info)\n return controls\n\n # Gather some information about our car and the ball\n ball_location = Vec3(packet.game_ball.physics.location)\n\n if self.car.loc.dist(ball_location) > 1500:\n # We're far away from the ball, let's try to lead it a little bit\n ball_prediction = self.get_ball_prediction_struct() # This can predict bounces, etc\n ball_in_future = find_slice_at_time(ball_prediction, packet.game_info.seconds_elapsed + 2)\n target_location = Vec3(ball_in_future.physics.location)\n self.renderer.draw_line_3d(ball_location, target_location, self.renderer.cyan())\n else:\n target_location = ball_location\n\n # Draw some things to help understand what the bot is thinking\n #self.renderer.draw_string_2d(100, 100, 1, 1, f'Ball at: {ball_location}', self.renderer.white())\n\n '''\n if 750 < self.car.vel.length() < 800:\n # We'll do a front flip if the car is moving at a certain speed.\n return self.begin_front_flip(packet)\n \n #controls = self.action.controls\n controls = SimpleControllerState()\n controls.steer = steer_toward_target(self.car, target_location)\n controls.throttle = 1.0\n # You can set more controls if you want, like controls.boost.\n '''\n print(\"the fuck we doin here?!?!?!?\")\n return controls\n\n def begin_front_flip(self, packet):\n # Send some quickchat just for fun\n self.send_quick_chat(team_only=False, quick_chat=QuickChatSelection.Information_IGotIt)\n\n # Do a front flip. 
We will be committed to this for a few seconds and the bot will ignore other\n # logic during that time because we are setting the action.\n self.action = Sequence([\n ControlStep(duration=0.05, controls=SimpleControllerState(jump=True)),\n ControlStep(duration=0.05, controls=SimpleControllerState(jump=False)),\n ControlStep(duration=0.2, controls=SimpleControllerState(jump=True, pitch=-1)),\n ControlStep(duration=0.8, controls=SimpleControllerState()),\n ])\n\n # Return the controls associated with the beginning of the sequence so we can start right away.\n return self.action.tick(packet)\n\n def is_kickoff(self, ball_location, ball_velocity):\n #self.logger.info(ball_location.flat() == kickoff_location)\n #self.logger.info(ball_velocity.length() == 0)\n return ball_location.flat() == kickoff_location and ball_velocity.length() == 0\n\n\n'''\nclass Bot(BaseAgent):\n DEVMODE = True\n\n def __init__(self, name, team, index):\n super().__init__(name, team, index)\n self.info: GameInfo = None\n self.draw: DrawingTool = None\n self.strat: Strategy = None\n self.car = None\n self.Actions: Maneuver = None\n self.controls: SimpleControllerState = SimpleControllerState()\n\n def initialize_agent(self):\n #self.logger.info(rlutilities.__file__)\n self.info = GameInfo(self.team)\n #for field in self.info._fields_:\n # print(field[0], getattr(self.info, field[0]))\n self.info.set_mode(\"soccar\")\n self.draw = DrawingTool(self.renderer)\n self.car = self.info.cars[self.index]\n self.logger.info(\"my index is {}\".format(self.index))\n self.strat = Strategy(self.info, my_car)\n\n def get_output(self, packet: GameTickPacket):\n # Update game data variables\n \n if self.tick_counter < 20:\n self.tick_counter += 1\n return Input()\n \n\n if self.Actions is None and not self.Actions.finished:\n controls = self.Action.tick(packet)\n\n self.info.read_packet(packet, self.get_field_info(), self.get_ball_path())\n\n self.draw.draw_path(self.get_ball_path())\n challenge = ChallengeGame(self.info.cars[self.index], self.info.ball_path)\n\n if challenge.should_go:\n self.Action = self.strat.chooseAction(challenge, self.info.ball_path)\n self.controls = self.Action.controls\n print(self.Action)\n \n if self.info.is_kickoff():\n return self.do\n self.controls = self.action.doThing(self.info)\n \n if self.DEVMODE:\n self.Action.render(self.draw)\n challenge.render(self.draw)\n\n return self.controls\n\n \n def get_ball_path(self):\n ball_prediction = self.get_ball_prediction_struct()\n path = []\n for i in range(0, ball_prediction.num_slices):\n prediction_slice = ball_prediction.slices[i]\n loc = prediction_slice.physics.location\n path.append(loc)\n return path\n\n'''",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
# Stanley H.I. Lio
# hlio@hawaii.edu
# All Rights Reserved. 2018
import logging, time, sys
from serial import Serial
from . import aanderaa_3835
from . import aanderaa_4330f
from . import aanderaa_4531d
from . import aanderaa_4319a
logger = logging.getLogger(__name__)
# works with 3835 (DO), 4330F (DO), 4531D (DO), and 4319A (EC)
def aanderaa_read_universal(port, max_retry=3, parsers=[aanderaa_4531d.parse_4531d, aanderaa_4330f.parse_4330f, aanderaa_3835.parse_3835, aanderaa_4319a.parse_4319a]):
logger.debug('aanderaa_read_universal()')
with Serial(port, 9600, timeout=2) as ser:
r = None
for _ in range(max_retry):
ser.flush()
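            # request a single measurement; the sensor replies with one text line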
ser.write(b'\r\ndo sample\r\n')
try:
line = ser.readline()
line = filter(lambda c: c <= 0x7f, line)
                line = bytearray(filter(lambda c: c not in (0x11, 0x13), line)) # strip XON/XOFF control characters (bytes iterate as ints)
line = line.decode().strip()
#print([ord(c) for c in line])
if len(line) <= 0:
logger.debug('(no response)')
continue
elif any([c in line for c in '#*']):
logger.debug('(junk)')
logger.debug(line)
logger.debug([ord(c) for c in line])
continue
elif 'SYNTAX ERROR' in line:
logger.debug('(SYNTAX ERROR)')
logger.debug([ord(c) for c in line])
continue
else:
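                    # try each model-specific parser until one returns a non-empty result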
for f in parsers:
logging.debug(f)
try:
r = f(line)
if r is not None and len(r) > 0:
break
except ValueError:
logger.debug('(valueerror)')
else:
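                        # for/else: no parser matched, so pause briefly and retry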
time.sleep(1.29)
ser.flush()
except UnicodeDecodeError:
logger.exception('UnicodeDecodeError: {}'.format(line))
ser.flush()
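            # stop retrying as soon as one of the parsers produced a result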
if r is not None and len(r.keys()):
break
time.sleep(1.17)
ser.flush()
return r
if '__main__' == __name__:
logger.setLevel(logging.INFO)
logging.basicConfig(level=logging.INFO)
DEFAULT_PORT = '/dev/ttyS1'
PORT = input('PORT=? (default={})'.format(DEFAULT_PORT)).strip()
if len(PORT) <= 0:
PORT = DEFAULT_PORT
while True:
try:
print(aanderaa_read_universal(PORT))
except KeyboardInterrupt:
print('user interrupted')
break
|
normal
|
{
"blob_id": "c52ad4040c14471319939605c400ff4d4ad982a7",
"index": 5213,
"step-1": "<mask token>\n\n\ndef aanderaa_read_universal(port, max_retry=3, parsers=[aanderaa_4531d.\n parse_4531d, aanderaa_4330f.parse_4330f, aanderaa_3835.parse_3835,\n aanderaa_4319a.parse_4319a]):\n logger.debug('aanderaa_read_universal()')\n with Serial(port, 9600, timeout=2) as ser:\n r = None\n for _ in range(max_retry):\n ser.flush()\n ser.write(b'\\r\\ndo sample\\r\\n')\n try:\n line = ser.readline()\n line = filter(lambda c: c <= 127, line)\n line = bytearray(filter(lambda c: c not in ['\\x11', '\\x13'],\n line))\n line = line.decode().strip()\n if len(line) <= 0:\n logger.debug('(no response)')\n continue\n elif any([(c in line) for c in '#*']):\n logger.debug('(junk)')\n logger.debug(line)\n logger.debug([ord(c) for c in line])\n continue\n elif 'SYNTAX ERROR' in line:\n logger.debug('(SYNTAX ERROR)')\n logger.debug([ord(c) for c in line])\n continue\n else:\n for f in parsers:\n logging.debug(f)\n try:\n r = f(line)\n if r is not None and len(r) > 0:\n break\n except ValueError:\n logger.debug('(valueerror)')\n else:\n time.sleep(1.29)\n ser.flush()\n except UnicodeDecodeError:\n logger.exception('UnicodeDecodeError: {}'.format(line))\n ser.flush()\n if r is not None and len(r.keys()):\n break\n time.sleep(1.17)\n ser.flush()\n return r\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef aanderaa_read_universal(port, max_retry=3, parsers=[aanderaa_4531d.\n parse_4531d, aanderaa_4330f.parse_4330f, aanderaa_3835.parse_3835,\n aanderaa_4319a.parse_4319a]):\n logger.debug('aanderaa_read_universal()')\n with Serial(port, 9600, timeout=2) as ser:\n r = None\n for _ in range(max_retry):\n ser.flush()\n ser.write(b'\\r\\ndo sample\\r\\n')\n try:\n line = ser.readline()\n line = filter(lambda c: c <= 127, line)\n line = bytearray(filter(lambda c: c not in ['\\x11', '\\x13'],\n line))\n line = line.decode().strip()\n if len(line) <= 0:\n logger.debug('(no response)')\n continue\n elif any([(c in line) for c in '#*']):\n logger.debug('(junk)')\n logger.debug(line)\n logger.debug([ord(c) for c in line])\n continue\n elif 'SYNTAX ERROR' in line:\n logger.debug('(SYNTAX ERROR)')\n logger.debug([ord(c) for c in line])\n continue\n else:\n for f in parsers:\n logging.debug(f)\n try:\n r = f(line)\n if r is not None and len(r) > 0:\n break\n except ValueError:\n logger.debug('(valueerror)')\n else:\n time.sleep(1.29)\n ser.flush()\n except UnicodeDecodeError:\n logger.exception('UnicodeDecodeError: {}'.format(line))\n ser.flush()\n if r is not None and len(r.keys()):\n break\n time.sleep(1.17)\n ser.flush()\n return r\n\n\nif '__main__' == __name__:\n logger.setLevel(logging.INFO)\n logging.basicConfig(level=logging.INFO)\n DEFAULT_PORT = '/dev/ttyS1'\n PORT = input('PORT=? (default={})'.format(DEFAULT_PORT)).strip()\n if len(PORT) <= 0:\n PORT = DEFAULT_PORT\n while True:\n try:\n print(aanderaa_read_universal(PORT))\n except KeyboardInterrupt:\n print('user interrupted')\n break\n",
"step-3": "<mask token>\nlogger = logging.getLogger(__name__)\n\n\ndef aanderaa_read_universal(port, max_retry=3, parsers=[aanderaa_4531d.\n parse_4531d, aanderaa_4330f.parse_4330f, aanderaa_3835.parse_3835,\n aanderaa_4319a.parse_4319a]):\n logger.debug('aanderaa_read_universal()')\n with Serial(port, 9600, timeout=2) as ser:\n r = None\n for _ in range(max_retry):\n ser.flush()\n ser.write(b'\\r\\ndo sample\\r\\n')\n try:\n line = ser.readline()\n line = filter(lambda c: c <= 127, line)\n line = bytearray(filter(lambda c: c not in ['\\x11', '\\x13'],\n line))\n line = line.decode().strip()\n if len(line) <= 0:\n logger.debug('(no response)')\n continue\n elif any([(c in line) for c in '#*']):\n logger.debug('(junk)')\n logger.debug(line)\n logger.debug([ord(c) for c in line])\n continue\n elif 'SYNTAX ERROR' in line:\n logger.debug('(SYNTAX ERROR)')\n logger.debug([ord(c) for c in line])\n continue\n else:\n for f in parsers:\n logging.debug(f)\n try:\n r = f(line)\n if r is not None and len(r) > 0:\n break\n except ValueError:\n logger.debug('(valueerror)')\n else:\n time.sleep(1.29)\n ser.flush()\n except UnicodeDecodeError:\n logger.exception('UnicodeDecodeError: {}'.format(line))\n ser.flush()\n if r is not None and len(r.keys()):\n break\n time.sleep(1.17)\n ser.flush()\n return r\n\n\nif '__main__' == __name__:\n logger.setLevel(logging.INFO)\n logging.basicConfig(level=logging.INFO)\n DEFAULT_PORT = '/dev/ttyS1'\n PORT = input('PORT=? (default={})'.format(DEFAULT_PORT)).strip()\n if len(PORT) <= 0:\n PORT = DEFAULT_PORT\n while True:\n try:\n print(aanderaa_read_universal(PORT))\n except KeyboardInterrupt:\n print('user interrupted')\n break\n",
"step-4": "import logging, time, sys\nfrom serial import Serial\nfrom . import aanderaa_3835\nfrom . import aanderaa_4330f\nfrom . import aanderaa_4531d\nfrom . import aanderaa_4319a\nlogger = logging.getLogger(__name__)\n\n\ndef aanderaa_read_universal(port, max_retry=3, parsers=[aanderaa_4531d.\n parse_4531d, aanderaa_4330f.parse_4330f, aanderaa_3835.parse_3835,\n aanderaa_4319a.parse_4319a]):\n logger.debug('aanderaa_read_universal()')\n with Serial(port, 9600, timeout=2) as ser:\n r = None\n for _ in range(max_retry):\n ser.flush()\n ser.write(b'\\r\\ndo sample\\r\\n')\n try:\n line = ser.readline()\n line = filter(lambda c: c <= 127, line)\n line = bytearray(filter(lambda c: c not in ['\\x11', '\\x13'],\n line))\n line = line.decode().strip()\n if len(line) <= 0:\n logger.debug('(no response)')\n continue\n elif any([(c in line) for c in '#*']):\n logger.debug('(junk)')\n logger.debug(line)\n logger.debug([ord(c) for c in line])\n continue\n elif 'SYNTAX ERROR' in line:\n logger.debug('(SYNTAX ERROR)')\n logger.debug([ord(c) for c in line])\n continue\n else:\n for f in parsers:\n logging.debug(f)\n try:\n r = f(line)\n if r is not None and len(r) > 0:\n break\n except ValueError:\n logger.debug('(valueerror)')\n else:\n time.sleep(1.29)\n ser.flush()\n except UnicodeDecodeError:\n logger.exception('UnicodeDecodeError: {}'.format(line))\n ser.flush()\n if r is not None and len(r.keys()):\n break\n time.sleep(1.17)\n ser.flush()\n return r\n\n\nif '__main__' == __name__:\n logger.setLevel(logging.INFO)\n logging.basicConfig(level=logging.INFO)\n DEFAULT_PORT = '/dev/ttyS1'\n PORT = input('PORT=? (default={})'.format(DEFAULT_PORT)).strip()\n if len(PORT) <= 0:\n PORT = DEFAULT_PORT\n while True:\n try:\n print(aanderaa_read_universal(PORT))\n except KeyboardInterrupt:\n print('user interrupted')\n break\n",
"step-5": "# Stanley H.I. Lio\n# hlio@hawaii.edu\n# All Rights Reserved. 2018\nimport logging, time, sys\nfrom serial import Serial\nfrom . import aanderaa_3835\nfrom . import aanderaa_4330f\nfrom . import aanderaa_4531d\nfrom . import aanderaa_4319a\n\n\nlogger = logging.getLogger(__name__)\n\n\n# works with 3835 (DO), 4330F (DO), 4531D (DO), and 4319A (EC)\ndef aanderaa_read_universal(port, max_retry=3, parsers=[aanderaa_4531d.parse_4531d, aanderaa_4330f.parse_4330f, aanderaa_3835.parse_3835, aanderaa_4319a.parse_4319a]):\n logger.debug('aanderaa_read_universal()')\n \n with Serial(port, 9600, timeout=2) as ser:\n\n r = None\n for _ in range(max_retry):\n\n ser.flush()\n ser.write(b'\\r\\ndo sample\\r\\n')\n try:\n line = ser.readline()\n line = filter(lambda c: c <= 0x7f, line)\n line = bytearray(filter(lambda c: c not in ['\\x11', '\\x13'], line)) # the control characters\n line = line.decode().strip()\n #print([ord(c) for c in line])\n\n if len(line) <= 0:\n logger.debug('(no response)') \n continue\n elif any([c in line for c in '#*']):\n logger.debug('(junk)')\n logger.debug(line)\n logger.debug([ord(c) for c in line])\n continue\n elif 'SYNTAX ERROR' in line:\n logger.debug('(SYNTAX ERROR)')\n logger.debug([ord(c) for c in line])\n continue\n else:\n for f in parsers:\n logging.debug(f)\n try:\n r = f(line)\n if r is not None and len(r) > 0:\n break\n except ValueError:\n logger.debug('(valueerror)')\n else:\n time.sleep(1.29)\n ser.flush()\n\n except UnicodeDecodeError:\n logger.exception('UnicodeDecodeError: {}'.format(line))\n ser.flush()\n\n if r is not None and len(r.keys()):\n break\n\n time.sleep(1.17)\n\n ser.flush()\n return r\n\n\nif '__main__' == __name__:\n\n logger.setLevel(logging.INFO)\n logging.basicConfig(level=logging.INFO)\n\n DEFAULT_PORT = '/dev/ttyS1'\n PORT = input('PORT=? (default={})'.format(DEFAULT_PORT)).strip()\n if len(PORT) <= 0:\n PORT = DEFAULT_PORT\n\n while True:\n try:\n print(aanderaa_read_universal(PORT))\n except KeyboardInterrupt:\n print('user interrupted')\n break\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
def get_stock_time_series(data_df, stock_id):
curr_ID_data = data_df.loc[stock_id]
output = np.array(curr_ID_data[0])
for i in range(1, len(curr_ID_data.index)):
output = np.vstack((output, curr_ID_data[i]))
return output
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_stock_time_series(data_df, stock_id):
curr_ID_data = data_df.loc[stock_id]
output = np.array(curr_ID_data[0])
for i in range(1, len(curr_ID_data.index)):
output = np.vstack((output, curr_ID_data[i]))
return output
<|reserved_special_token_0|>
for ETF_ID in ETF_ID_list:
Nm_conf = conf.config('feature_conf').config['Nm']
if Nm_conf['enable'] is True:
Nm_method = Nm_conf['method']
file_postfix = '_Nm_' + str(Nm_conf['type'][0]
) + '_' + Nm_method + '_' + str(94) + '_' + ETF_ID + '.pkl'
else:
file_postfix = '_' + str(94) + '.pkl'
src_file_path = './Data/all_feature_data' + file_postfix
meta_file_path = './Data/all_meta_data' + file_postfix
data = pd.read_pickle(src_file_path)
f = open(meta_file_path, 'rb')
tasharep_ID = pickle.load(f)
member_ID = pickle.load(f)
Date = pickle.load(f)
feature_list = pickle.load(f)
price_scaler = pickle.load(f)
trade_scaler = pickle.load(f)
f_idx = 59
src_time_period = ['20000101', '20180511']
eval_time_period = ['20180402', '20180518']
eval_time_len = Date.index(eval_time_period[1]) - Date.index(
eval_time_period[0]) + 1
total_acc = 0
for day_shift in range(eval_time_len - 5):
eval_start_date = Date.index(eval_time_period[0]) + day_shift
target_start_date = eval_start_date - 21
target_time_period = [Date[target_start_date], Date[eval_start_date]]
next_time_period = [Date[eval_start_date], Date[eval_start_date + 5]]
date_mask = (data.columns > src_time_period[0]) & (data.columns <=
src_time_period[1])
src_data = data.iloc[:, date_mask]
date_mask = (data.columns > target_time_period[0]) & (data.columns <=
target_time_period[1])
target_data = data.iloc[:, date_mask]
date_mask = (data.columns > next_time_period[0]) & (data.columns <=
next_time_period[1])
next_data = data.iloc[:, date_mask]
src_TS = get_stock_time_series(src_data, ETF_ID)
target_TS = get_stock_time_series(target_data, ETF_ID)
next_TS = get_stock_time_series(next_data, ETF_ID)
overall_TS = get_stock_time_series(data, ETF_ID)
target_xcorr = np.correlate(src_TS[:, f_idx], target_TS[:, f_idx],
mode='valid')
target_len = len(target_TS)
max_target_xcorr_idx = target_xcorr.argsort()[::-1]
predict_target_idx = max_target_xcorr_idx + target_len
next_len = len(next_TS)
        next_xcorr = np.correlate(src_TS[:, f_idx], next_TS[:, f_idx], mode='valid')
        max_next_xcorr_idx = next_xcorr.argsort()[::-1]
top_num = 10
acc = []
label = np.argmax(next_TS[:, -3:], axis=-1)
for idx in max_target_xcorr_idx[:top_num]:
predict = np.argmax(overall_TS[predict_target_idx[idx]:
predict_target_idx[idx] + next_len, -3:], axis=-1)
acc.append(sum(label == predict) / next_len)
max_acc_idx = np.argsort(acc)[::-1]
output_xcorr_idx = [max_target_xcorr_idx[acc_idx] for acc_idx in
max_acc_idx]
top_num = 3
avg_acc = 0
acc = []
for idx in output_xcorr_idx[:top_num]:
predict = np.argmax(overall_TS[predict_target_idx[idx]:
predict_target_idx[idx] + next_len, -3:], axis=-1)
acc.append(sum(label == predict) / next_len)
avg_acc = avg_acc + acc[-1]
print('Avg. Acc.: [{}]'.format(avg_acc / top_num))
total_acc = total_acc + avg_acc / top_num
print('[{}] Overall Acc.: [{}]'.format(ETF_ID, total_acc / (
eval_time_len - 5)))
output_date[ETF_ID] = [Date[i] for i in output_xcorr_idx[:10]]
<|reserved_special_token_0|>
pickle.dump(output_date, f, True)
f.close()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_stock_time_series(data_df, stock_id):
curr_ID_data = data_df.loc[stock_id]
output = np.array(curr_ID_data[0])
for i in range(1, len(curr_ID_data.index)):
output = np.vstack((output, curr_ID_data[i]))
return output
ID_conf = conf.config('feature_conf').config['ID']
ETF_ID_list = ['0050', '0052', '0053', '0054', '0055', '0056', '0057',
'0058', '0059', '006201', '006203', '006204', '006208']
output_date = {}
for ETF_ID in ETF_ID_list:
Nm_conf = conf.config('feature_conf').config['Nm']
if Nm_conf['enable'] is True:
Nm_method = Nm_conf['method']
file_postfix = '_Nm_' + str(Nm_conf['type'][0]
) + '_' + Nm_method + '_' + str(94) + '_' + ETF_ID + '.pkl'
else:
file_postfix = '_' + str(94) + '.pkl'
src_file_path = './Data/all_feature_data' + file_postfix
meta_file_path = './Data/all_meta_data' + file_postfix
data = pd.read_pickle(src_file_path)
f = open(meta_file_path, 'rb')
tasharep_ID = pickle.load(f)
member_ID = pickle.load(f)
Date = pickle.load(f)
feature_list = pickle.load(f)
price_scaler = pickle.load(f)
trade_scaler = pickle.load(f)
f_idx = 59
src_time_period = ['20000101', '20180511']
eval_time_period = ['20180402', '20180518']
eval_time_len = Date.index(eval_time_period[1]) - Date.index(
eval_time_period[0]) + 1
total_acc = 0
for day_shift in range(eval_time_len - 5):
eval_start_date = Date.index(eval_time_period[0]) + day_shift
target_start_date = eval_start_date - 21
target_time_period = [Date[target_start_date], Date[eval_start_date]]
next_time_period = [Date[eval_start_date], Date[eval_start_date + 5]]
date_mask = (data.columns > src_time_period[0]) & (data.columns <=
src_time_period[1])
src_data = data.iloc[:, date_mask]
date_mask = (data.columns > target_time_period[0]) & (data.columns <=
target_time_period[1])
target_data = data.iloc[:, date_mask]
date_mask = (data.columns > next_time_period[0]) & (data.columns <=
next_time_period[1])
next_data = data.iloc[:, date_mask]
src_TS = get_stock_time_series(src_data, ETF_ID)
target_TS = get_stock_time_series(target_data, ETF_ID)
next_TS = get_stock_time_series(next_data, ETF_ID)
overall_TS = get_stock_time_series(data, ETF_ID)
target_xcorr = np.correlate(src_TS[:, f_idx], target_TS[:, f_idx],
mode='valid')
target_len = len(target_TS)
max_target_xcorr_idx = target_xcorr.argsort()[::-1]
predict_target_idx = max_target_xcorr_idx + target_len
next_len = len(next_TS)
        next_xcorr = np.correlate(src_TS[:, f_idx], next_TS[:, f_idx], mode='valid')
        max_next_xcorr_idx = next_xcorr.argsort()[::-1]
top_num = 10
acc = []
label = np.argmax(next_TS[:, -3:], axis=-1)
for idx in max_target_xcorr_idx[:top_num]:
predict = np.argmax(overall_TS[predict_target_idx[idx]:
predict_target_idx[idx] + next_len, -3:], axis=-1)
acc.append(sum(label == predict) / next_len)
max_acc_idx = np.argsort(acc)[::-1]
output_xcorr_idx = [max_target_xcorr_idx[acc_idx] for acc_idx in
max_acc_idx]
top_num = 3
avg_acc = 0
acc = []
for idx in output_xcorr_idx[:top_num]:
predict = np.argmax(overall_TS[predict_target_idx[idx]:
predict_target_idx[idx] + next_len, -3:], axis=-1)
acc.append(sum(label == predict) / next_len)
avg_acc = avg_acc + acc[-1]
print('Avg. Acc.: [{}]'.format(avg_acc / top_num))
total_acc = total_acc + avg_acc / top_num
print('[{}] Overall Acc.: [{}]'.format(ETF_ID, total_acc / (
eval_time_len - 5)))
output_date[ETF_ID] = [Date[i] for i in output_xcorr_idx[:10]]
f = open('./Data/xcorr_date_data.pkl', 'wb')
pickle.dump(output_date, f, True)
f.close()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import numpy as np
import pandas as pd
import pickle
import matplotlib.pyplot as plt
from datetime import datetime, timedelta
import config as conf
def get_stock_time_series(data_df, stock_id):
curr_ID_data = data_df.loc[stock_id]
output = np.array(curr_ID_data[0])
for i in range(1, len(curr_ID_data.index)):
output = np.vstack((output, curr_ID_data[i]))
return output
ID_conf = conf.config('feature_conf').config['ID']
ETF_ID_list = ['0050', '0052', '0053', '0054', '0055', '0056', '0057',
'0058', '0059', '006201', '006203', '006204', '006208']
output_date = {}
for ETF_ID in ETF_ID_list:
Nm_conf = conf.config('feature_conf').config['Nm']
if Nm_conf['enable'] is True:
Nm_method = Nm_conf['method']
file_postfix = '_Nm_' + str(Nm_conf['type'][0]
) + '_' + Nm_method + '_' + str(94) + '_' + ETF_ID + '.pkl'
else:
file_postfix = '_' + str(94) + '.pkl'
src_file_path = './Data/all_feature_data' + file_postfix
meta_file_path = './Data/all_meta_data' + file_postfix
data = pd.read_pickle(src_file_path)
f = open(meta_file_path, 'rb')
tasharep_ID = pickle.load(f)
member_ID = pickle.load(f)
Date = pickle.load(f)
feature_list = pickle.load(f)
price_scaler = pickle.load(f)
trade_scaler = pickle.load(f)
f_idx = 59
src_time_period = ['20000101', '20180511']
eval_time_period = ['20180402', '20180518']
eval_time_len = Date.index(eval_time_period[1]) - Date.index(
eval_time_period[0]) + 1
total_acc = 0
for day_shift in range(eval_time_len - 5):
eval_start_date = Date.index(eval_time_period[0]) + day_shift
target_start_date = eval_start_date - 21
target_time_period = [Date[target_start_date], Date[eval_start_date]]
next_time_period = [Date[eval_start_date], Date[eval_start_date + 5]]
date_mask = (data.columns > src_time_period[0]) & (data.columns <=
src_time_period[1])
src_data = data.iloc[:, date_mask]
date_mask = (data.columns > target_time_period[0]) & (data.columns <=
target_time_period[1])
target_data = data.iloc[:, date_mask]
date_mask = (data.columns > next_time_period[0]) & (data.columns <=
next_time_period[1])
next_data = data.iloc[:, date_mask]
src_TS = get_stock_time_series(src_data, ETF_ID)
target_TS = get_stock_time_series(target_data, ETF_ID)
next_TS = get_stock_time_series(next_data, ETF_ID)
overall_TS = get_stock_time_series(data, ETF_ID)
target_xcorr = np.correlate(src_TS[:, f_idx], target_TS[:, f_idx],
mode='valid')
target_len = len(target_TS)
max_target_xcorr_idx = target_xcorr.argsort()[::-1]
predict_target_idx = max_target_xcorr_idx + target_len
next_len = len(next_TS)
        next_xcorr = np.correlate(src_TS[:, f_idx], next_TS[:, f_idx], mode='valid')
        max_next_xcorr_idx = next_xcorr.argsort()[::-1]
top_num = 10
acc = []
label = np.argmax(next_TS[:, -3:], axis=-1)
for idx in max_target_xcorr_idx[:top_num]:
predict = np.argmax(overall_TS[predict_target_idx[idx]:
predict_target_idx[idx] + next_len, -3:], axis=-1)
acc.append(sum(label == predict) / next_len)
max_acc_idx = np.argsort(acc)[::-1]
output_xcorr_idx = [max_target_xcorr_idx[acc_idx] for acc_idx in
max_acc_idx]
top_num = 3
avg_acc = 0
acc = []
for idx in output_xcorr_idx[:top_num]:
predict = np.argmax(overall_TS[predict_target_idx[idx]:
predict_target_idx[idx] + next_len, -3:], axis=-1)
acc.append(sum(label == predict) / next_len)
avg_acc = avg_acc + acc[-1]
print('Avg. Acc.: [{}]'.format(avg_acc / top_num))
total_acc = total_acc + avg_acc / top_num
print('[{}] Overall Acc.: [{}]'.format(ETF_ID, total_acc / (
eval_time_len - 5)))
output_date[ETF_ID] = [Date[i] for i in output_xcorr_idx[:10]]
f = open('./Data/xcorr_date_data.pkl', 'wb')
pickle.dump(output_date, f, True)
f.close()
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
"""
Created on Tue May 22 15:01:21 2018
@author: Weiyu_Lee
"""
import numpy as np
import pandas as pd
import pickle
import matplotlib.pyplot as plt
from datetime import datetime, timedelta
import config as conf
def get_stock_time_series(data_df, stock_id):
curr_ID_data = data_df.loc[stock_id]
output = np.array(curr_ID_data[0])
for i in range(1, len(curr_ID_data.index)):
output = np.vstack((output, curr_ID_data[i]))
return output
ID_conf = conf.config('feature_conf').config['ID']
#ETF_ID = ID_conf["ID"]
#ETF_ID_list = ["0050"]
ETF_ID_list = ["0050", "0052", "0053", "0054", "0055", "0056", "0057", "0058", "0059",
"006201", "006203", "006204", "006208"]
output_date = {}
for ETF_ID in ETF_ID_list:
Nm_conf = conf.config('feature_conf').config['Nm']
if Nm_conf["enable"] is True:
Nm_method = Nm_conf["method"]
file_postfix = '_Nm_' + str(Nm_conf["type"][0]) + '_' + Nm_method + '_' + str(94) + "_" + ETF_ID + '.pkl'
else:
file_postfix = "_" + str(94) + '.pkl'
src_file_path = './Data/all_feature_data' + file_postfix
meta_file_path = './Data/all_meta_data' + file_postfix
data = pd.read_pickle(src_file_path)
f = open(meta_file_path, "rb")
tasharep_ID = pickle.load(f)
member_ID = pickle.load(f)
Date = pickle.load(f)
feature_list = pickle.load(f)
price_scaler = pickle.load(f)
trade_scaler = pickle.load(f)
f_idx = 59 # MACD
src_time_period = ['20000101', '20180511']
# eval_time_period = ['20180511', '20180518']
eval_time_period = ['20180402', '20180518']
eval_time_len = Date.index(eval_time_period[1]) - Date.index(eval_time_period[0]) + 1
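    # number of trading days in the evaluation window (endpoints inclusive)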
total_acc = 0
for day_shift in range(eval_time_len-5):
eval_start_date = Date.index(eval_time_period[0]) + day_shift
target_start_date = eval_start_date - 21
target_time_period = [Date[target_start_date], Date[eval_start_date]]
next_time_period = [Date[eval_start_date], Date[eval_start_date + 5]]
date_mask = (data.columns > src_time_period[0]) & (data.columns <= src_time_period[1])
src_data = data.iloc[:, date_mask]
date_mask = (data.columns > target_time_period[0]) & (data.columns <= target_time_period[1])
target_data = data.iloc[:, date_mask]
date_mask = (data.columns > next_time_period[0]) & (data.columns <= next_time_period[1])
next_data = data.iloc[:, date_mask]
src_TS = get_stock_time_series(src_data, ETF_ID)
target_TS = get_stock_time_series(target_data, ETF_ID)
next_TS = get_stock_time_series(next_data, ETF_ID)
overall_TS = get_stock_time_series(data, ETF_ID)
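        # cross-correlate the 21-day target window against the full history to find similar stretches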
target_xcorr = np.correlate(src_TS[:, f_idx], target_TS[:, f_idx], mode='valid')
        next_xcorr = np.correlate(src_TS[:, f_idx], next_TS[:, f_idx], mode='valid')  # needed below for max_next_xcorr_idx
target_len = len(target_TS)
max_target_xcorr_idx = target_xcorr.argsort()[::-1]
predict_target_idx = max_target_xcorr_idx + target_len
next_len = len(next_TS)
max_next_xcorr_idx = next_xcorr.argsort()[::-1]
# plt.plot(target_xcorr)
# plt.savefig("target_xcorr_{}.png".format(ETF_ID))
#for idx in max_target_xcorr_idx[:10]:
# plt.figure()
# plt.plot(target_TS[:, 84])
# plt.plot(src_TS[max_target_xcorr_idx[idx]:max_target_xcorr_idx[idx]+target_len, 84])
#plt.figure()
#plt.plot(target_xcorr)
#plt.plot(next_xcorr)
top_num = 10
acc = []
label = np.argmax(next_TS[:, -3:], axis=-1)
for idx in max_target_xcorr_idx[:top_num]:
predict = np.argmax(overall_TS[predict_target_idx[idx]:predict_target_idx[idx]+next_len, -3:], axis=-1)
acc.append(sum(label == predict) / next_len)
max_acc_idx = np.argsort(acc)[::-1]
output_xcorr_idx = [max_target_xcorr_idx[acc_idx] for acc_idx in max_acc_idx]
top_num = 3
avg_acc = 0
acc = []
for idx in output_xcorr_idx[:top_num]:
#plt.figure()
#plt.plot(next_TS[:, 84])
#plt.plot(overall_TS[predict_target_idx[idx]:predict_target_idx[idx]+next_len, 84])
#plt.figure()
#plt.plot(overall_TS[predict_target_idx[idx]:predict_target_idx[idx]+next_len, 3])
predict = np.argmax(overall_TS[predict_target_idx[idx]:predict_target_idx[idx]+next_len, -3:], axis=-1)
acc.append(sum(label == predict) / next_len)
avg_acc = avg_acc + acc[-1]
#print("Acc.: [{}]".format(acc[-1]))
print("Avg. Acc.: [{}]".format(avg_acc/top_num))
total_acc = total_acc + avg_acc/top_num
print("[{}] Overall Acc.: [{}]".format(ETF_ID, total_acc/(eval_time_len-5)))
output_date[ETF_ID] = [Date[i] for i in output_xcorr_idx[:10]]
f = open('./Data/xcorr_date_data.pkl', 'wb')
pickle.dump(output_date, f, True)
f.close()
|
flexible
|
{
"blob_id": "6a7e5a78f516cecf083ca3900bdaaf427bedd497",
"index": 756,
"step-1": "<mask token>\n\n\ndef get_stock_time_series(data_df, stock_id):\n curr_ID_data = data_df.loc[stock_id]\n output = np.array(curr_ID_data[0])\n for i in range(1, len(curr_ID_data.index)):\n output = np.vstack((output, curr_ID_data[i]))\n return output\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_stock_time_series(data_df, stock_id):\n curr_ID_data = data_df.loc[stock_id]\n output = np.array(curr_ID_data[0])\n for i in range(1, len(curr_ID_data.index)):\n output = np.vstack((output, curr_ID_data[i]))\n return output\n\n\n<mask token>\nfor ETF_ID in ETF_ID_list:\n Nm_conf = conf.config('feature_conf').config['Nm']\n if Nm_conf['enable'] is True:\n Nm_method = Nm_conf['method']\n file_postfix = '_Nm_' + str(Nm_conf['type'][0]\n ) + '_' + Nm_method + '_' + str(94) + '_' + ETF_ID + '.pkl'\n else:\n file_postfix = '_' + str(94) + '.pkl'\n src_file_path = './Data/all_feature_data' + file_postfix\n meta_file_path = './Data/all_meta_data' + file_postfix\n data = pd.read_pickle(src_file_path)\n f = open(meta_file_path, 'rb')\n tasharep_ID = pickle.load(f)\n member_ID = pickle.load(f)\n Date = pickle.load(f)\n feature_list = pickle.load(f)\n price_scaler = pickle.load(f)\n trade_scaler = pickle.load(f)\n f_idx = 59\n src_time_period = ['20000101', '20180511']\n eval_time_period = ['20180402', '20180518']\n eval_time_len = Date.index(eval_time_period[1]) - Date.index(\n eval_time_period[0]) + 1\n total_acc = 0\n for day_shift in range(eval_time_len - 5):\n eval_start_date = Date.index(eval_time_period[0]) + day_shift\n target_start_date = eval_start_date - 21\n target_time_period = [Date[target_start_date], Date[eval_start_date]]\n next_time_period = [Date[eval_start_date], Date[eval_start_date + 5]]\n date_mask = (data.columns > src_time_period[0]) & (data.columns <=\n src_time_period[1])\n src_data = data.iloc[:, date_mask]\n date_mask = (data.columns > target_time_period[0]) & (data.columns <=\n target_time_period[1])\n target_data = data.iloc[:, date_mask]\n date_mask = (data.columns > next_time_period[0]) & (data.columns <=\n next_time_period[1])\n next_data = data.iloc[:, date_mask]\n src_TS = get_stock_time_series(src_data, ETF_ID)\n target_TS = get_stock_time_series(target_data, ETF_ID)\n next_TS = get_stock_time_series(next_data, ETF_ID)\n overall_TS = get_stock_time_series(data, ETF_ID)\n target_xcorr = np.correlate(src_TS[:, f_idx], target_TS[:, f_idx],\n mode='valid')\n target_len = len(target_TS)\n max_target_xcorr_idx = target_xcorr.argsort()[::-1]\n predict_target_idx = max_target_xcorr_idx + target_len\n next_len = len(next_TS)\n max_next_xcorr_idx = next_xcorr.argsort()[::-1]\n top_num = 10\n acc = []\n label = np.argmax(next_TS[:, -3:], axis=-1)\n for idx in max_target_xcorr_idx[:top_num]:\n predict = np.argmax(overall_TS[predict_target_idx[idx]:\n predict_target_idx[idx] + next_len, -3:], axis=-1)\n acc.append(sum(label == predict) / next_len)\n max_acc_idx = np.argsort(acc)[::-1]\n output_xcorr_idx = [max_target_xcorr_idx[acc_idx] for acc_idx in\n max_acc_idx]\n top_num = 3\n avg_acc = 0\n acc = []\n for idx in output_xcorr_idx[:top_num]:\n predict = np.argmax(overall_TS[predict_target_idx[idx]:\n predict_target_idx[idx] + next_len, -3:], axis=-1)\n acc.append(sum(label == predict) / next_len)\n avg_acc = avg_acc + acc[-1]\n print('Avg. Acc.: [{}]'.format(avg_acc / top_num))\n total_acc = total_acc + avg_acc / top_num\n print('[{}] Overall Acc.: [{}]'.format(ETF_ID, total_acc / (\n eval_time_len - 5)))\n output_date[ETF_ID] = [Date[i] for i in output_xcorr_idx[:10]]\n<mask token>\npickle.dump(output_date, f, True)\nf.close()\n",
"step-3": "<mask token>\n\n\ndef get_stock_time_series(data_df, stock_id):\n curr_ID_data = data_df.loc[stock_id]\n output = np.array(curr_ID_data[0])\n for i in range(1, len(curr_ID_data.index)):\n output = np.vstack((output, curr_ID_data[i]))\n return output\n\n\nID_conf = conf.config('feature_conf').config['ID']\nETF_ID_list = ['0050', '0052', '0053', '0054', '0055', '0056', '0057',\n '0058', '0059', '006201', '006203', '006204', '006208']\noutput_date = {}\nfor ETF_ID in ETF_ID_list:\n Nm_conf = conf.config('feature_conf').config['Nm']\n if Nm_conf['enable'] is True:\n Nm_method = Nm_conf['method']\n file_postfix = '_Nm_' + str(Nm_conf['type'][0]\n ) + '_' + Nm_method + '_' + str(94) + '_' + ETF_ID + '.pkl'\n else:\n file_postfix = '_' + str(94) + '.pkl'\n src_file_path = './Data/all_feature_data' + file_postfix\n meta_file_path = './Data/all_meta_data' + file_postfix\n data = pd.read_pickle(src_file_path)\n f = open(meta_file_path, 'rb')\n tasharep_ID = pickle.load(f)\n member_ID = pickle.load(f)\n Date = pickle.load(f)\n feature_list = pickle.load(f)\n price_scaler = pickle.load(f)\n trade_scaler = pickle.load(f)\n f_idx = 59\n src_time_period = ['20000101', '20180511']\n eval_time_period = ['20180402', '20180518']\n eval_time_len = Date.index(eval_time_period[1]) - Date.index(\n eval_time_period[0]) + 1\n total_acc = 0\n for day_shift in range(eval_time_len - 5):\n eval_start_date = Date.index(eval_time_period[0]) + day_shift\n target_start_date = eval_start_date - 21\n target_time_period = [Date[target_start_date], Date[eval_start_date]]\n next_time_period = [Date[eval_start_date], Date[eval_start_date + 5]]\n date_mask = (data.columns > src_time_period[0]) & (data.columns <=\n src_time_period[1])\n src_data = data.iloc[:, date_mask]\n date_mask = (data.columns > target_time_period[0]) & (data.columns <=\n target_time_period[1])\n target_data = data.iloc[:, date_mask]\n date_mask = (data.columns > next_time_period[0]) & (data.columns <=\n next_time_period[1])\n next_data = data.iloc[:, date_mask]\n src_TS = get_stock_time_series(src_data, ETF_ID)\n target_TS = get_stock_time_series(target_data, ETF_ID)\n next_TS = get_stock_time_series(next_data, ETF_ID)\n overall_TS = get_stock_time_series(data, ETF_ID)\n target_xcorr = np.correlate(src_TS[:, f_idx], target_TS[:, f_idx],\n mode='valid')\n target_len = len(target_TS)\n max_target_xcorr_idx = target_xcorr.argsort()[::-1]\n predict_target_idx = max_target_xcorr_idx + target_len\n next_len = len(next_TS)\n max_next_xcorr_idx = next_xcorr.argsort()[::-1]\n top_num = 10\n acc = []\n label = np.argmax(next_TS[:, -3:], axis=-1)\n for idx in max_target_xcorr_idx[:top_num]:\n predict = np.argmax(overall_TS[predict_target_idx[idx]:\n predict_target_idx[idx] + next_len, -3:], axis=-1)\n acc.append(sum(label == predict) / next_len)\n max_acc_idx = np.argsort(acc)[::-1]\n output_xcorr_idx = [max_target_xcorr_idx[acc_idx] for acc_idx in\n max_acc_idx]\n top_num = 3\n avg_acc = 0\n acc = []\n for idx in output_xcorr_idx[:top_num]:\n predict = np.argmax(overall_TS[predict_target_idx[idx]:\n predict_target_idx[idx] + next_len, -3:], axis=-1)\n acc.append(sum(label == predict) / next_len)\n avg_acc = avg_acc + acc[-1]\n print('Avg. 
Acc.: [{}]'.format(avg_acc / top_num))\n total_acc = total_acc + avg_acc / top_num\n print('[{}] Overall Acc.: [{}]'.format(ETF_ID, total_acc / (\n eval_time_len - 5)))\n output_date[ETF_ID] = [Date[i] for i in output_xcorr_idx[:10]]\nf = open('./Data/xcorr_date_data.pkl', 'wb')\npickle.dump(output_date, f, True)\nf.close()\n",
"step-4": "<mask token>\nimport numpy as np\nimport pandas as pd\nimport pickle\nimport matplotlib.pyplot as plt\nfrom datetime import datetime, timedelta\nimport config as conf\n\n\ndef get_stock_time_series(data_df, stock_id):\n curr_ID_data = data_df.loc[stock_id]\n output = np.array(curr_ID_data[0])\n for i in range(1, len(curr_ID_data.index)):\n output = np.vstack((output, curr_ID_data[i]))\n return output\n\n\nID_conf = conf.config('feature_conf').config['ID']\nETF_ID_list = ['0050', '0052', '0053', '0054', '0055', '0056', '0057',\n '0058', '0059', '006201', '006203', '006204', '006208']\noutput_date = {}\nfor ETF_ID in ETF_ID_list:\n Nm_conf = conf.config('feature_conf').config['Nm']\n if Nm_conf['enable'] is True:\n Nm_method = Nm_conf['method']\n file_postfix = '_Nm_' + str(Nm_conf['type'][0]\n ) + '_' + Nm_method + '_' + str(94) + '_' + ETF_ID + '.pkl'\n else:\n file_postfix = '_' + str(94) + '.pkl'\n src_file_path = './Data/all_feature_data' + file_postfix\n meta_file_path = './Data/all_meta_data' + file_postfix\n data = pd.read_pickle(src_file_path)\n f = open(meta_file_path, 'rb')\n tasharep_ID = pickle.load(f)\n member_ID = pickle.load(f)\n Date = pickle.load(f)\n feature_list = pickle.load(f)\n price_scaler = pickle.load(f)\n trade_scaler = pickle.load(f)\n f_idx = 59\n src_time_period = ['20000101', '20180511']\n eval_time_period = ['20180402', '20180518']\n eval_time_len = Date.index(eval_time_period[1]) - Date.index(\n eval_time_period[0]) + 1\n total_acc = 0\n for day_shift in range(eval_time_len - 5):\n eval_start_date = Date.index(eval_time_period[0]) + day_shift\n target_start_date = eval_start_date - 21\n target_time_period = [Date[target_start_date], Date[eval_start_date]]\n next_time_period = [Date[eval_start_date], Date[eval_start_date + 5]]\n date_mask = (data.columns > src_time_period[0]) & (data.columns <=\n src_time_period[1])\n src_data = data.iloc[:, date_mask]\n date_mask = (data.columns > target_time_period[0]) & (data.columns <=\n target_time_period[1])\n target_data = data.iloc[:, date_mask]\n date_mask = (data.columns > next_time_period[0]) & (data.columns <=\n next_time_period[1])\n next_data = data.iloc[:, date_mask]\n src_TS = get_stock_time_series(src_data, ETF_ID)\n target_TS = get_stock_time_series(target_data, ETF_ID)\n next_TS = get_stock_time_series(next_data, ETF_ID)\n overall_TS = get_stock_time_series(data, ETF_ID)\n target_xcorr = np.correlate(src_TS[:, f_idx], target_TS[:, f_idx],\n mode='valid')\n target_len = len(target_TS)\n max_target_xcorr_idx = target_xcorr.argsort()[::-1]\n predict_target_idx = max_target_xcorr_idx + target_len\n next_len = len(next_TS)\n max_next_xcorr_idx = next_xcorr.argsort()[::-1]\n top_num = 10\n acc = []\n label = np.argmax(next_TS[:, -3:], axis=-1)\n for idx in max_target_xcorr_idx[:top_num]:\n predict = np.argmax(overall_TS[predict_target_idx[idx]:\n predict_target_idx[idx] + next_len, -3:], axis=-1)\n acc.append(sum(label == predict) / next_len)\n max_acc_idx = np.argsort(acc)[::-1]\n output_xcorr_idx = [max_target_xcorr_idx[acc_idx] for acc_idx in\n max_acc_idx]\n top_num = 3\n avg_acc = 0\n acc = []\n for idx in output_xcorr_idx[:top_num]:\n predict = np.argmax(overall_TS[predict_target_idx[idx]:\n predict_target_idx[idx] + next_len, -3:], axis=-1)\n acc.append(sum(label == predict) / next_len)\n avg_acc = avg_acc + acc[-1]\n print('Avg. 
Acc.: [{}]'.format(avg_acc / top_num))\n total_acc = total_acc + avg_acc / top_num\n print('[{}] Overall Acc.: [{}]'.format(ETF_ID, total_acc / (\n eval_time_len - 5)))\n output_date[ETF_ID] = [Date[i] for i in output_xcorr_idx[:10]]\nf = open('./Data/xcorr_date_data.pkl', 'wb')\npickle.dump(output_date, f, True)\nf.close()\n",
"step-5": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Tue May 22 15:01:21 2018\r\n\r\n@author: Weiyu_Lee\r\n\"\"\"\r\nimport numpy as np\r\nimport pandas as pd\r\nimport pickle\r\nimport matplotlib.pyplot as plt\r\nfrom datetime import datetime, timedelta\r\n\r\nimport config as conf\r\n\r\ndef get_stock_time_series(data_df, stock_id):\r\n \r\n curr_ID_data = data_df.loc[stock_id]\r\n\r\n output = np.array(curr_ID_data[0])\r\n for i in range(1, len(curr_ID_data.index)):\r\n output = np.vstack((output, curr_ID_data[i]))\r\n \r\n return output \r\n\r\nID_conf = conf.config('feature_conf').config['ID']\r\n#ETF_ID = ID_conf[\"ID\"]\r\n#ETF_ID_list = [\"0050\"]\r\nETF_ID_list = [\"0050\", \"0052\", \"0053\", \"0054\", \"0055\", \"0056\", \"0057\", \"0058\", \"0059\", \r\n \"006201\", \"006203\", \"006204\", \"006208\"]\r\n\r\noutput_date = {}\r\nfor ETF_ID in ETF_ID_list:\r\n Nm_conf = conf.config('feature_conf').config['Nm']\r\n if Nm_conf[\"enable\"] is True:\r\n Nm_method = Nm_conf[\"method\"]\r\n file_postfix = '_Nm_' + str(Nm_conf[\"type\"][0]) + '_' + Nm_method + '_' + str(94) + \"_\" + ETF_ID + '.pkl'\r\n else:\r\n file_postfix = \"_\" + str(94) + '.pkl'\r\n \r\n src_file_path = './Data/all_feature_data' + file_postfix\r\n meta_file_path = './Data/all_meta_data' + file_postfix\r\n \r\n data = pd.read_pickle(src_file_path)\r\n \r\n f = open(meta_file_path, \"rb\")\r\n tasharep_ID = pickle.load(f)\r\n member_ID = pickle.load(f)\r\n Date = pickle.load(f)\r\n feature_list = pickle.load(f)\r\n price_scaler = pickle.load(f)\r\n trade_scaler = pickle.load(f)\r\n \r\n f_idx = 59 # MACD\r\n \r\n src_time_period = ['20000101', '20180511']\r\n# eval_time_period = ['20180511', '20180518']\r\n eval_time_period = ['20180402', '20180518']\r\n eval_time_len = Date.index(eval_time_period[1]) - Date.index(eval_time_period[0]) + 1\r\n \r\n total_acc = 0\r\n for day_shift in range(eval_time_len-5):\r\n \r\n eval_start_date = Date.index(eval_time_period[0]) + day_shift\r\n target_start_date = eval_start_date - 21\r\n \r\n target_time_period = [Date[target_start_date], Date[eval_start_date]]\r\n next_time_period = [Date[eval_start_date], Date[eval_start_date + 5]] \r\n \r\n date_mask = (data.columns > src_time_period[0]) & (data.columns <= src_time_period[1])\r\n src_data = data.iloc[:, date_mask]\r\n \r\n date_mask = (data.columns > target_time_period[0]) & (data.columns <= target_time_period[1])\r\n target_data = data.iloc[:, date_mask]\r\n \r\n date_mask = (data.columns > next_time_period[0]) & (data.columns <= next_time_period[1])\r\n next_data = data.iloc[:, date_mask]\r\n \r\n src_TS = get_stock_time_series(src_data, ETF_ID)\r\n target_TS = get_stock_time_series(target_data, ETF_ID)\r\n next_TS = get_stock_time_series(next_data, ETF_ID)\r\n overall_TS = get_stock_time_series(data, ETF_ID)\r\n \r\n target_xcorr = np.correlate(src_TS[:, f_idx], target_TS[:, f_idx], mode='valid')\r\n# next_xcorr = np.correlate(src_TS[:, f_idx], next_TS[:, f_idx], mode='valid')\r\n \r\n target_len = len(target_TS)\r\n max_target_xcorr_idx = target_xcorr.argsort()[::-1]\r\n predict_target_idx = max_target_xcorr_idx + target_len\r\n \r\n next_len = len(next_TS)\r\n max_next_xcorr_idx = next_xcorr.argsort()[::-1]\r\n \r\n # plt.plot(target_xcorr)\r\n # plt.savefig(\"target_xcorr_{}.png\".format(ETF_ID))\r\n \r\n #for idx in max_target_xcorr_idx[:10]:\r\n # plt.figure()\r\n # plt.plot(target_TS[:, 84])\r\n # plt.plot(src_TS[max_target_xcorr_idx[idx]:max_target_xcorr_idx[idx]+target_len, 84])\r\n \r\n #plt.figure()\r\n 
#plt.plot(target_xcorr)\r\n #plt.plot(next_xcorr)\r\n \r\n top_num = 10\r\n acc = []\r\n label = np.argmax(next_TS[:, -3:], axis=-1)\r\n for idx in max_target_xcorr_idx[:top_num]:\r\n predict = np.argmax(overall_TS[predict_target_idx[idx]:predict_target_idx[idx]+next_len, -3:], axis=-1)\r\n acc.append(sum(label == predict) / next_len)\r\n \r\n max_acc_idx = np.argsort(acc)[::-1]\r\n output_xcorr_idx = [max_target_xcorr_idx[acc_idx] for acc_idx in max_acc_idx]\r\n \r\n top_num = 3\r\n avg_acc = 0\r\n acc = []\r\n for idx in output_xcorr_idx[:top_num]:\r\n #plt.figure()\r\n #plt.plot(next_TS[:, 84])\r\n #plt.plot(overall_TS[predict_target_idx[idx]:predict_target_idx[idx]+next_len, 84]) \r\n #plt.figure()\r\n #plt.plot(overall_TS[predict_target_idx[idx]:predict_target_idx[idx]+next_len, 3])\r\n \r\n predict = np.argmax(overall_TS[predict_target_idx[idx]:predict_target_idx[idx]+next_len, -3:], axis=-1)\r\n acc.append(sum(label == predict) / next_len)\r\n \r\n avg_acc = avg_acc + acc[-1]\r\n #print(\"Acc.: [{}]\".format(acc[-1]))\r\n \r\n print(\"Avg. Acc.: [{}]\".format(avg_acc/top_num))\r\n \r\n total_acc = total_acc + avg_acc/top_num\r\n \r\n print(\"[{}] Overall Acc.: [{}]\".format(ETF_ID, total_acc/(eval_time_len-5)))\r\n output_date[ETF_ID] = [Date[i] for i in output_xcorr_idx[:10]]\r\n\r\nf = open('./Data/xcorr_date_data.pkl', 'wb')\r\npickle.dump(output_date, f, True) \r\nf.close()\r\n\r\n\r\n\r\n\r\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
hbar = 1.0
m_e = 1.0
h22m = hbar ** 2 / (2 * m_e)
pi = np.pi
eV = 1 / 27.21138505
eV_Ha = eV
nm = 18.89726124565
kB_eV = 8.6173324e-05
kB = kB_eV * eV_Ha
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import numpy as np
hbar = 1.0
m_e = 1.0
h22m = hbar ** 2 / (2 * m_e)
pi = np.pi
eV = 1 / 27.21138505
eV_Ha = eV
nm = 18.89726124565
kB_eV = 8.6173324e-05
kB = kB_eV * eV_Ha
<|reserved_special_token_1|>
''' Load a variety of relevant physical parameters.
All quantities are in atomic units, such that
m_e = 1
e = 1
hbar = 1
1/4\pi\epsilon = 1
'''
import numpy as np
hbar = 1.0
m_e = 1.0
h22m = hbar**2 / (2*m_e)
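# kinetic-energy prefactor hbar^2/(2*m_e); equals 1/2 in atomic units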
pi = np.pi
eV = 1/27.21138505
eV_Ha = eV
nm = 18.89726124565
kB_eV = 8.6173324e-5
kB = kB_eV * eV_Ha
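# Boltzmann constant converted from eV/K to Hartree/K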
|
flexible
|
{
"blob_id": "f9f835b24aa8fc77109db9e2d89a3f43bcb4b181",
"index": 7079,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nhbar = 1.0\nm_e = 1.0\nh22m = hbar ** 2 / (2 * m_e)\npi = np.pi\neV = 1 / 27.21138505\neV_Ha = eV\nnm = 18.89726124565\nkB_eV = 8.6173324e-05\nkB = kB_eV * eV_Ha\n",
"step-3": "<mask token>\nimport numpy as np\nhbar = 1.0\nm_e = 1.0\nh22m = hbar ** 2 / (2 * m_e)\npi = np.pi\neV = 1 / 27.21138505\neV_Ha = eV\nnm = 18.89726124565\nkB_eV = 8.6173324e-05\nkB = kB_eV * eV_Ha\n",
"step-4": "''' Load a variety of relevant physical parameters.\n\nAll quantities are in atomic units, such that\n m_e = 1\n e = 1\n hbar = 1\n 1/4\\pi\\epsilon = 1\n'''\n\nimport numpy as np\n\nhbar = 1.0\nm_e = 1.0\nh22m = hbar**2 / (2*m_e)\npi = np.pi\neV = 1/27.21138505\neV_Ha = eV\nnm = 18.89726124565\n\nkB_eV = 8.6173324e-5\nkB = kB_eV * eV_Ha \n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import tkinter as tk
import Widgets as wg
import Logic as lgc
from tkinter.ttk import Separator
from tkinter.messagebox import showerror, showinfo
# Fonts that we can utilise
FONTS = {"large":("Helvetica", 20), "medium":("Helvetica", 16), "small":("Helvetica", 12)}
class Handler: # Handles the window and the Game interaction
def __init__(self):
# Game Handle
self.Game = None
self.GameParams = {}
# Window Handle
self.Window = Window(self)
self.Window.mainloop()
def Replay (self): # Reset attributes and classes
self.GameParams = {}
del self.Game
self.Game = None
def Is_Running (self):
return self.Game.Running
def Start_Game(self): # Begin the game, run the updates needed.
self.Game = lgc.Game(**self.GameParams)
self.Game.Start_Game()
# Update Game page
self.Update_Game()
self.Window.Pages["Game"].Update_Game_Type()
    def Get_Current_Player(self) -> str: # get the color of the player whose turn it is
if self.Game.Running:
if self.Game.Current_Player == "B":
return "black"
else:
return "white"
else:
return "None"
def Get_Game_Type(self) -> str: # Get the game rule type
g = self.Game.Game_Type
if g == 1:
return "SIMPLE"
else:
return "FULL"
def Get_Score(self) -> tuple: # Get the current score
s = self.Game.Get_Discs()
return s[0], s[1] # b, w
    def Move(self, x: int, y: int) -> bool: # Attempt a move at the given position
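        # the board refreshes and the game-over check runs whether or not the move succeeded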
complete = self.Game.Next_Move(x, y)
if complete:
self.Update_Game()
self.Game_Complete_Check()
return True
self.Update_Game()
self.Game_Complete_Check()
return False
def Get_Winner(self) -> tuple: # Gets the winner of the game
return self.Game.Check_Winner()
def Game_Complete_Check(self): # Check if the game is over and act accordingly
if self.Is_Running() == False:
# Run Game Over feature here
self.Window.showPage("Postgame")
# Update the post page
self.Window.Pages["Postgame"].Update()
def Update_Game(self): # Run a full update on the game
self.Window.Pages["Game"].Full_Update()
class Window (tk.Tk): # This will be the main window of the GUI
def __init__ (self, controller, *args, **kwargs):
tk.Tk.__init__(self, *args, **kwargs)
self.Handler = controller # This is handler between the game and window
# Root attributes
self.title("Othello")
try:
self.iconbitmap("Icon.ico")
except:
pass
self.minsize(600, 600)
#self.maxsize(1000,1000)
# Master frame
self.container = tk.Frame(self)
self.container.pack(side="top", fill="both", expand=True)
self.container.grid_rowconfigure(0, weight=1)
self.container.grid_columnconfigure(0, weight=1)
# Set up the pages
self.Pages = {}
for page in (Pregame, Custom_Board, Game, Postgame):
# Initiate each page and add them to the dictionary
# Dictionary will use the name of the class so that it can be accessed
# without the knowledge of the clas name
new = page(self.container, self)
self.Pages[page.FrameName] = new
new.grid(row=0, column=0, sticky="nsew")
# Show the initial page
self.showPage("Pregame")
# Window
def showPage(self, pagename: str): # Show a chosen page
page = self.Pages[pagename]
page.tkraise()
# Game
def Begin_Game(self): # Start the game
self.Handler.Start_Game()
def Get_Current_Player (self) -> str: # Get the current player
return self.Handler.Get_Current_Player()
def Replay(self): # Clean up the old game, start an new one
self.Pages["Pregame"].__GUI_Reset__()
self.Pages["Game"].Reset_Game()
self.Handler.Replay()
self.showPage("Pregame")
class Pregame (tk.Frame): # The 'home' screen
FrameName = "Pregame"
def __init__ (self, parent, controller):
tk.Frame.__init__(self, parent)
self.controller = controller
self.configure(bg="white")
self.set_vals = []
self.__GUI_Reset__()
    def __GUI_Reset__(self): # This will clean the screen and then recreate it; essential for replaying the game
for widget in self.winfo_children():
widget.destroy()
# Title Banner
tk.Label(self, text="Otello", font=FONTS["large"], bg="white").pack(side="top")
Separator(self, orient="horizontal").pack(side="top", fill="x", padx=10)
# Rule Set
rule_set_frame = tk.Frame(self, bg="white")
rule_set_frame.pack(pady=10)
# Subheading
self.rs_label = tk.Label(rule_set_frame, text="Rule Set", font=FONTS["medium"], bg="white")
self.rs_label.pack(side="top")
self.full_btn = tk.Button(rule_set_frame, text="FULL", font=FONTS["medium"], bg="#bbbbbb",
command=lambda:self.Select_Rule_Set("full"))
self.full_btn.pack()
self.simple_btn = tk.Button(rule_set_frame, text="SIMPLE", font=FONTS["medium"], bg="#bbbbbb",
command=lambda:self.Select_Rule_Set("simple"))
self.simple_btn.pack()
# Row Size
row_frame = tk.Frame(self, bg="white")
row_frame.pack(pady=10)
self.row_label = tk.Label(row_frame, text="Board Rows", font=FONTS["medium"], bg="white")
self.row_label.grid(row=0, column=0, columnspan=7)
self.Rows_Buttons = []
place = 0
for rows in [4, 6, 8, 10, 12, 14, 16]:
x = tk.Button(row_frame, text=str(rows), font=FONTS["small"], bg="#bbbbbb",
command=lambda rows=rows: self.Select_Rows(rows))
x.grid(row=1, column=place)
self.Rows_Buttons.append(x)
place += 1
# Column Size
col_frame = tk.Frame(self, bg="white")
col_frame.pack(pady=10)
self.col_label = tk.Label(col_frame, text="Board Columns", font=FONTS["medium"], bg="white")
self.col_label.grid(row=0, column=0, columnspan=7)
self.Cols_Buttons = []
place = 0
for cols in [4, 6, 8, 10, 12, 14, 16]:
x = tk.Button(col_frame, text=str(cols), font=FONTS["small"], bg="#bbbbbb",
command=lambda cols=cols: self.Select_Cols(cols))
x.grid(row=1, column=place)
self.Cols_Buttons.append(x)
place += 1
# First to Move
first_move_frame = tk.Frame(self, bg="white")
first_move_frame.pack(pady=10)
self.first_move_label = tk.Label(first_move_frame, text="First to move", bg="white", font=FONTS["medium"])
self.first_move_label.grid(row=0, column=0, columnspan=2)
self.black_btn = tk.Button(first_move_frame, text="Black", bg="#bbbbbb", font=FONTS["medium"],
command=lambda:self.Select_First_Move("black"))
self.black_btn.grid(row=1, column=0)
self.white_btn = tk.Button(first_move_frame, text="White", bg="#bbbbbb", font=FONTS["medium"],
command=lambda:self.Select_First_Move("white"))
self.white_btn.grid(row=1, column=1)
# How to win
condition_frame = tk.Frame(self, bg="white")
condition_frame.pack(pady=10)
        self.condition_label = tk.Label(condition_frame, text="The winner is the player with...",
bg="white", font=FONTS["medium"])
self.condition_label.grid(row=0, column=0, columnspan=2)
self.greater_score = tk.Button(condition_frame, text="more discs.", bg="#bbbbbb", font=FONTS["medium"],
command=lambda: self.Select_Condition(">"))
self.greater_score.grid(row=1, column=0)
self.lesser_score = tk.Button(condition_frame, text="less discs.", bg="#bbbbbb", font=FONTS["medium"],
command=lambda: self.Select_Condition("<"))
self.lesser_score.grid(row=1, column=1)
# Start the game button
self.Start_Game_Btn = tk.Button(self, text="Start", bg="#ff2222", activebackground="#992222",
font=FONTS["medium"])
self.Start_Game_Btn.pack(side="bottom")
def Select_Rule_Set(self, _set: str): # sets the rule set of the game
if _set == "simple":
self.controller.Handler.GameParams["game_type"] = 1 # Corresponds to the game logic
else:
self.controller.Handler.GameParams["game_type"] = 2
self.full_btn.destroy()
self.simple_btn.destroy()
self.rs_label.configure(text="Rule Set: " + _set.upper())
self.set_vals.append("rules")
self.Check_Can_Start()
def Select_Rows(self, rows: int): # Sets the rows of the board
self.controller.Handler.GameParams["y_size"] = rows
for button in self.Rows_Buttons:
button.destroy()
self.row_label.configure(text="Board Rows: " + str(rows))
self.set_vals.append("rows")
self.Check_Can_Start()
def Select_Cols(self, cols: int): # sets the columns of the board
self.controller.Handler.GameParams["x_size"] = cols
for button in self.Cols_Buttons:
button.destroy()
self.col_label.configure(text="Board Columns: " + str(cols))
self.set_vals.append("cols")
self.Check_Can_Start()
def Select_First_Move (self, mover: str): # Sets the first player to make a move
if mover == "black":
self.controller.Handler.GameParams["first_move"] = "B"
else:
self.controller.Handler.GameParams["first_move"] = "W"
self.black_btn.destroy()
self.white_btn.destroy()
self.first_move_label.configure(text="First to move: " + mover)
self.set_vals.append("move")
self.Check_Can_Start()
    def Select_Condition(self, condition: str): # This will set the game win condition
self.controller.Handler.GameParams["game_winner"] = condition
if condition == ">":
self.condition_label.configure(text="The winner is, the player with more discs.")
else:
self.condition_label.configure(text="The winner is, the player with less discs.")
self.lesser_score.destroy()
self.greater_score.destroy()
self.set_vals.append("win")
self.Check_Can_Start()
    def Check_Can_Start (self): # Enable the Start button once every option has been chosen
        if {"rules", "rows", "cols", "move", "win"} <= set(self.set_vals):
self.Start_Game_Btn.configure(bg="#22ff22", activebackground="#229922",
command=lambda: self.Start_Custom_Board())
def Start_Custom_Board (self):
self.controller.Pages["Setup_Board"].Setup_Board()
self.controller.showPage("Setup_Board")
self.controller.Pages["Setup_Board"].Instructions_Display()
class Custom_Board (tk.Frame):
FrameName = "Setup_Board"
def __init__ (self, parent, controller):
tk.Frame.__init__ (self, parent)
self.controller = controller
self.configure(bg="white")
# Title bar
self.Title_Frame = tk.Frame(self, bg="white")
self.Title_Frame.pack(side="top", fill="x")
# Title
tk.Label(self.Title_Frame, text="Create Custom Board", bg="white", font=FONTS["medium"]).pack(side="left")
# Start Button
start = tk.Button(self.Title_Frame, text="Play", bg="#22ff22", activebackground="#229922", font=FONTS["medium"],
command=lambda: self.Start())
start.pack(side="right")
# Use custom Board check button
self.Use_Board = tk.IntVar()
Use_Board = tk.Checkbutton(self.Title_Frame, text="Use custom board", font=FONTS["medium"],
bg="white", activebackground="white",
var=self.Use_Board, onvalue=1, offvalue=0)
Use_Board.pack(side="right", padx=10)
# Board
self.Board_Area = tk.Frame(self, bg="#009900")
self.Board_Area.pack(side="top", fill="both", expand=True)
self.Board = []
def Setup_Board (self):
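        # clear any widgets left over from a previous board layout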
for widget in self.Board_Area.winfo_children():
widget.destroy()
self.Board = []
for y in range(self.controller.Handler.GameParams["y_size"]):
row = []
for x in range(self.controller.Handler.GameParams["x_size"]):
                # Diameter corresponds to the length of the shortest side of the board
height = self.Board_Area.winfo_height()
width = self.Board_Area.winfo_width()
if height > width:
diameter = width/self.controller.Handler.GameParams["x_size"]
else:
diameter = height/self.controller.Handler.GameParams["y_size"]
self.Board_Area.grid_columnconfigure(x, weight=1)
self.Board_Area.grid_rowconfigure(y, weight=1)
disc = wg.Disc(self.Board_Area, self.controller, diameter=diameter, mode="setup")
disc.grid(row=y, column=x, sticky="nsew")
row.append(disc)
self.Board.append(row)
def Parse_Board (self) -> list: # This will parse the GUI board and create a board that will work for the Game()
new_board = []
for row in self.Board:
new_row = []
for disc in row:
if disc.Current_Color == "white":
new_row.append("W")
elif disc.Current_Color == "black":
new_row.append("B")
else:
new_row.append(None)
new_board.append(new_row)
return new_board
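    # For illustration: a 2x2 setup with one black disc and one white disc would
    # parse to something like [["B", None], [None, "W"]].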
def Instructions_Display(self):
showinfo("How to use", "Click on a tile to cycle between white, black or empty. Check the \"Use Custom Board\" box to use this board!")
    def Start (self): # Check whether the user wants the custom board; if so, set the Game board to the user's selection, then start
if self.Use_Board.get():
self.controller.Handler.GameParams["board"] = self.Parse_Board()
self.controller.Begin_Game()
self.controller.Pages["Game"].__GUI_init__()
self.controller.Pages["Game"].Update_Board()
self.controller.showPage("Game")
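    # Hand-off sequence: push the parsed board into GameParams (when opted in),
    # construct the Game, build the GUI grid, sync the disc colors, then raise the Game page.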
class Game (tk.Frame): # This is the 'stage' where the game will be played.
FrameName = "Game"
def __init__ (self, parent, controller):
tk.Frame.__init__(self, parent)
self.controller = controller
self.configure(bg="white")
# Status Bar
self.Status_Bar = tk.Frame(self, bg="white")
self.Status_Bar.pack(side="top", fill="x")
self.Status_Bar.grid_columnconfigure(0, weight=1)
self.Status_Bar.grid_columnconfigure(1, weight=1)
self.Status_Bar.grid_columnconfigure(2, weight=1)
self.Status_Bar.grid_rowconfigure(0, weight=1)
self.Current_Player = tk.Label(self.Status_Bar, text="None", bg="white", font=FONTS["medium"])
self.Current_Player.grid(row=0, column=0)
self.Game_Type = tk.Label(self.Status_Bar, text="FULL", bg="white", font=FONTS["medium"])
self.Game_Type.grid(row=0, column=1)
self.Score = tk.Label(self.Status_Bar, text="Black: 2 | 2:White", bg="white", font=FONTS["medium"])
self.Score.grid(row=0, column=2)
# Board
self.Board_Area = tk.Frame(self, bg="#009900")
self.Board_Area.pack(side="top", fill="both", expand=True)
self.Board = []
    def __GUI_init__ (self): # This will initiate the game board once all the data is provided.
for y in range(self.controller.Handler.GameParams["y_size"]):
row = []
for x in range(self.controller.Handler.GameParams["x_size"]):
                # Diameter will correspond to the length of the shortest side of the board
height = self.Board_Area.winfo_height()
width = self.Board_Area.winfo_width()
if height > width:
diameter = width/self.controller.Handler.GameParams["x_size"]
else:
diameter = height/self.controller.Handler.GameParams["y_size"]
self.Board_Area.grid_columnconfigure(x, weight=1)
self.Board_Area.grid_rowconfigure(y, weight=1)
disc = wg.Disc(self.Board_Area, self.controller, diameter=diameter,
command= lambda x=x, y=y: self.Disc_Function(x, y))
disc.grid(row=y, column=x, sticky="nsew")
row.append(disc)
self.Board.append(row)
self.Update_Board()
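    # Note on the lambda above: the x=x, y=y defaults bind the loop variables at
    # definition time. A bare "lambda: self.Disc_Function(x, y)" would close over
    # the variables themselves, and every disc would fire with the loop's final (x, y).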
    def Reset_Game(self): # This will reset the game board to its initial state
self.Board = []
for widget in self.Board_Area.winfo_children():
widget.destroy()
def Disc_Function (self, x: int, y: int): # This is the function run when the player clicks a disc slot/disc
        if not self.controller.Handler.Move(x+1, y+1): # Try to run the Move function on the Handler (GUI is 0-indexed, game logic is 1-indexed)
self.Invalid_Move()
    def Invalid_Move(self): # This command will run when a player tries to make a move that's not possible
showerror("Invalid Move", "You cannot move there!")
    def Update_Board (self): # Update the board to match the Game() board
for y in range(len(self.Board)):
for x in range(len(self.Board[y])):
game_piece = self.controller.Handler.Game.Board[y][x]
                if game_piece is None:
pass
elif game_piece == "B":
if self.Board[y][x].Current_Color != "black":
self.Board[y][x].Set_Piece_Color("black")
elif game_piece == "W":
if self.Board[y][x].Current_Color != "white":
self.Board[y][x].Set_Piece_Color("white")
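    # The color checks above make the redraw incremental: a disc is repainted only
    # when its logical color has changed, which keeps per-turn updates cheap on big boards.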
def Update_Current_Player (self): # Update the current player identifier
self.Current_Player.config(text="Turn: " + self.controller.Get_Current_Player())
def Update_Game_Type(self): # Update the game type identifier
g_type = self.controller.Handler.Get_Game_Type()
self.Game_Type.configure(text="Rules: " + g_type)
def Update_Score (self): # Update the score identifier
b, w = self.controller.Handler.Get_Score()
        self.Score.configure(text="Black: {0!s} | {1!s}:White".format(b, w))
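        # e.g. with Get_Score() returning (12, 7), the label reads "Black: 12 | 7:White".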
def Full_Update(self): # Run a full update on the graphics
self.Update_Score()
self.Update_Current_Player()
self.Update_Board()
class Postgame (tk.Frame): # The 'end game' screen
FrameName = "Postgame"
def __init__ (self, parent, controller):
tk.Frame.__init__(self, parent)
self.controller = controller
self.configure(bg="white")
# Set a page title
self.Title = tk.Label(self, text="Game Over!", bg="white", font=FONTS["large"])
self.Title.pack(side="top")
Separator(self, orient="horizontal").pack(side="top", fill="x", padx=10)
# Set the winner text object
self.Winner = tk.Label(self, text="The winner is black-discs.", bg="white", font=FONTS["medium"])
self.Winner.pack(side="top")
# Create the replay and exit buttons
self.Buttons = tk.Frame(self, bg="white")
self.Buttons.pack()
Replay = tk.Button(self.Buttons, text="Replay", bg="#bbbbbb", font=FONTS["medium"],
command=lambda: self.Replay())
Replay.grid(row=0, column=0)
Quit = tk.Button(self.Buttons, text="Quit", bg="#bbbbbb", font=FONTS["medium"],
command=lambda: self.Quit())
Quit.grid(row=0, column=1)
# the area for the board output
self.Board_Area = tk.Frame(self, bg="white")
self.Board_Area.pack(side="bottom")
# Score text
self.Score = tk.Label(self.Board_Area, text="", bg="white", font=FONTS["medium"])
self.Score.pack()
# The display for the board
self.Board_Display = tk.Frame(self.Board_Area, bg="green")
self.Board_Display.pack()
self.Board = []
def Replay(self): # Initiate the Replay
self.controller.Replay()
def Quit(self): # Kill the game
self.controller.destroy()
exit()
def Update_Board (self): # Update the game board display, kill old, create new
for widget in self.Board_Display.winfo_children():
widget.destroy()
for y in range(self.controller.Handler.GameParams["y_size"]):
row = []
for x in range(self.controller.Handler.GameParams["x_size"]):
                self.Board_Display.grid_columnconfigure(x, weight=1) # configure the frame that actually holds the discs
                self.Board_Display.grid_rowconfigure(y, weight=1)
col = None
place_col = self.controller.Handler.Game.Board[y][x]
if place_col == "B":
col = "black"
elif place_col == "W":
col = "white"
disc = wg.Disc(self.Board_Display, self.controller, col=col, diameter=50)
disc.grid(row=y, column=x, sticky="nsew")
row.append(disc)
self.Board.append(row)
def Update(self): # Update the whole page
winner, scores = self.controller.Handler.Get_Winner()
if winner.lower() == "b":
winner = "black-discs"
elif winner.lower() == "w":
winner = "white-discs"
else:
            winner = "no one"
self.Winner.configure(text="The winner is " + winner)
self.Score.configure(text="Black: {0!s} | {1!s}:White".format(scores[0], scores[1]))
self.Update_Board()
if __name__ == "__main__":
Window = Handler()
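    # Design note: this rebinds the name "Window" (also the Tk window class above) to
    # the Handler instance; a distinct name such as "app = Handler()" would avoid the shadowing.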
|
normal
|
{
"blob_id": "9b8f3962172d4a867a3a070b6139bb302fd7e2f5",
"index": 9934,
"step-1": "<mask token>\n\n\nclass Window(tk.Tk):\n <mask token>\n <mask token>\n <mask token>\n\n def Get_Current_Player(self) ->str:\n return self.Handler.Get_Current_Player()\n <mask token>\n\n\nclass Pregame(tk.Frame):\n FrameName = 'Pregame'\n\n def __init__(self, parent, controller):\n tk.Frame.__init__(self, parent)\n self.controller = controller\n self.configure(bg='white')\n self.set_vals = []\n self.__GUI_Reset__()\n\n def __GUI_Reset__(self):\n for widget in self.winfo_children():\n widget.destroy()\n tk.Label(self, text='Otello', font=FONTS['large'], bg='white').pack(\n side='top')\n Separator(self, orient='horizontal').pack(side='top', fill='x', padx=10\n )\n rule_set_frame = tk.Frame(self, bg='white')\n rule_set_frame.pack(pady=10)\n self.rs_label = tk.Label(rule_set_frame, text='Rule Set', font=\n FONTS['medium'], bg='white')\n self.rs_label.pack(side='top')\n self.full_btn = tk.Button(rule_set_frame, text='FULL', font=FONTS[\n 'medium'], bg='#bbbbbb', command=lambda : self.Select_Rule_Set(\n 'full'))\n self.full_btn.pack()\n self.simple_btn = tk.Button(rule_set_frame, text='SIMPLE', font=\n FONTS['medium'], bg='#bbbbbb', command=lambda : self.\n Select_Rule_Set('simple'))\n self.simple_btn.pack()\n row_frame = tk.Frame(self, bg='white')\n row_frame.pack(pady=10)\n self.row_label = tk.Label(row_frame, text='Board Rows', font=FONTS[\n 'medium'], bg='white')\n self.row_label.grid(row=0, column=0, columnspan=7)\n self.Rows_Buttons = []\n place = 0\n for rows in [4, 6, 8, 10, 12, 14, 16]:\n x = tk.Button(row_frame, text=str(rows), font=FONTS['small'],\n bg='#bbbbbb', command=lambda rows=rows: self.Select_Rows(rows))\n x.grid(row=1, column=place)\n self.Rows_Buttons.append(x)\n place += 1\n col_frame = tk.Frame(self, bg='white')\n col_frame.pack(pady=10)\n self.col_label = tk.Label(col_frame, text='Board Columns', font=\n FONTS['medium'], bg='white')\n self.col_label.grid(row=0, column=0, columnspan=7)\n self.Cols_Buttons = []\n place = 0\n for cols in [4, 6, 8, 10, 12, 14, 16]:\n x = tk.Button(col_frame, text=str(cols), font=FONTS['small'],\n bg='#bbbbbb', command=lambda cols=cols: self.Select_Cols(cols))\n x.grid(row=1, column=place)\n self.Cols_Buttons.append(x)\n place += 1\n first_move_frame = tk.Frame(self, bg='white')\n first_move_frame.pack(pady=10)\n self.first_move_label = tk.Label(first_move_frame, text=\n 'First to move', bg='white', font=FONTS['medium'])\n self.first_move_label.grid(row=0, column=0, columnspan=2)\n self.black_btn = tk.Button(first_move_frame, text='Black', bg=\n '#bbbbbb', font=FONTS['medium'], command=lambda : self.\n Select_First_Move('black'))\n self.black_btn.grid(row=1, column=0)\n self.white_btn = tk.Button(first_move_frame, text='White', bg=\n '#bbbbbb', font=FONTS['medium'], command=lambda : self.\n Select_First_Move('white'))\n self.white_btn.grid(row=1, column=1)\n condition_frame = tk.Frame(self, bg='white')\n condition_frame.pack(pady=10)\n self.condition_label = tk.Label(condition_frame, text=\n 'The winner is, the player with..', bg='white', font=FONTS[\n 'medium'])\n self.condition_label.grid(row=0, column=0, columnspan=2)\n self.greater_score = tk.Button(condition_frame, text='more discs.',\n bg='#bbbbbb', font=FONTS['medium'], command=lambda : self.\n Select_Condition('>'))\n self.greater_score.grid(row=1, column=0)\n self.lesser_score = tk.Button(condition_frame, text='less discs.',\n bg='#bbbbbb', font=FONTS['medium'], command=lambda : self.\n Select_Condition('<'))\n self.lesser_score.grid(row=1, column=1)\n self.Start_Game_Btn = 
tk.Button(self, text='Start', bg='#ff2222',\n activebackground='#992222', font=FONTS['medium'])\n self.Start_Game_Btn.pack(side='bottom')\n\n def Select_Rule_Set(self, _set: str):\n if _set == 'simple':\n self.controller.Handler.GameParams['game_type'] = 1\n else:\n self.controller.Handler.GameParams['game_type'] = 2\n self.full_btn.destroy()\n self.simple_btn.destroy()\n self.rs_label.configure(text='Rule Set: ' + _set.upper())\n self.set_vals.append('rules')\n self.Check_Can_Start()\n\n def Select_Rows(self, rows: int):\n self.controller.Handler.GameParams['y_size'] = rows\n for button in self.Rows_Buttons:\n button.destroy()\n self.row_label.configure(text='Board Rows: ' + str(rows))\n self.set_vals.append('rows')\n self.Check_Can_Start()\n\n def Select_Cols(self, cols: int):\n self.controller.Handler.GameParams['x_size'] = cols\n for button in self.Cols_Buttons:\n button.destroy()\n self.col_label.configure(text='Board Columns: ' + str(cols))\n self.set_vals.append('cols')\n self.Check_Can_Start()\n\n def Select_First_Move(self, mover: str):\n if mover == 'black':\n self.controller.Handler.GameParams['first_move'] = 'B'\n else:\n self.controller.Handler.GameParams['first_move'] = 'W'\n self.black_btn.destroy()\n self.white_btn.destroy()\n self.first_move_label.configure(text='First to move: ' + mover)\n self.set_vals.append('move')\n self.Check_Can_Start()\n\n def Select_Condition(self, condition: str):\n self.controller.Handler.GameParams['game_winner'] = condition\n if condition == '>':\n self.condition_label.configure(text=\n 'The winner is, the player with more discs.')\n else:\n self.condition_label.configure(text=\n 'The winner is, the player with less discs.')\n self.lesser_score.destroy()\n self.greater_score.destroy()\n self.set_vals.append('win')\n self.Check_Can_Start()\n\n def Check_Can_Start(self):\n if ('rules' in self.set_vals and 'rows' in self.set_vals and 'cols' in\n self.set_vals and 'move' in self.set_vals and 'win' in self.\n set_vals):\n self.Start_Game_Btn.configure(bg='#22ff22', activebackground=\n '#229922', command=lambda : self.Start_Custom_Board())\n\n def Start_Custom_Board(self):\n self.controller.Pages['Setup_Board'].Setup_Board()\n self.controller.showPage('Setup_Board')\n self.controller.Pages['Setup_Board'].Instructions_Display()\n\n\nclass Custom_Board(tk.Frame):\n FrameName = 'Setup_Board'\n\n def __init__(self, parent, controller):\n tk.Frame.__init__(self, parent)\n self.controller = controller\n self.configure(bg='white')\n self.Title_Frame = tk.Frame(self, bg='white')\n self.Title_Frame.pack(side='top', fill='x')\n tk.Label(self.Title_Frame, text='Create Custom Board', bg='white',\n font=FONTS['medium']).pack(side='left')\n start = tk.Button(self.Title_Frame, text='Play', bg='#22ff22',\n activebackground='#229922', font=FONTS['medium'], command=lambda :\n self.Start())\n start.pack(side='right')\n self.Use_Board = tk.IntVar()\n Use_Board = tk.Checkbutton(self.Title_Frame, text=\n 'Use custom board', font=FONTS['medium'], bg='white',\n activebackground='white', var=self.Use_Board, onvalue=1, offvalue=0\n )\n Use_Board.pack(side='right', padx=10)\n self.Board_Area = tk.Frame(self, bg='#009900')\n self.Board_Area.pack(side='top', fill='both', expand=True)\n self.Board = []\n\n def Setup_Board(self):\n for widget in self.Board_Area.winfo_children():\n widget.destroy()\n self.Board = []\n for y in range(self.controller.Handler.GameParams['y_size']):\n row = []\n for x in range(self.controller.Handler.GameParams['x_size']):\n height = 
self.Board_Area.winfo_height()\n width = self.Board_Area.winfo_width()\n if height > width:\n diameter = width / self.controller.Handler.GameParams[\n 'x_size']\n else:\n diameter = height / self.controller.Handler.GameParams[\n 'y_size']\n self.Board_Area.grid_columnconfigure(x, weight=1)\n self.Board_Area.grid_rowconfigure(y, weight=1)\n disc = wg.Disc(self.Board_Area, self.controller, diameter=\n diameter, mode='setup')\n disc.grid(row=y, column=x, sticky='nsew')\n row.append(disc)\n self.Board.append(row)\n\n def Parse_Board(self) ->list:\n new_board = []\n for row in self.Board:\n new_row = []\n for disc in row:\n if disc.Current_Color == 'white':\n new_row.append('W')\n elif disc.Current_Color == 'black':\n new_row.append('B')\n else:\n new_row.append(None)\n new_board.append(new_row)\n return new_board\n\n def Instructions_Display(self):\n showinfo('How to use',\n 'Click on a tile to cycle between white, black or empty. Check the \"Use Custom Board\" box to use this board!'\n )\n\n def Start(self):\n if self.Use_Board.get():\n self.controller.Handler.GameParams['board'] = self.Parse_Board()\n self.controller.Begin_Game()\n self.controller.Pages['Game'].__GUI_init__()\n self.controller.Pages['Game'].Update_Board()\n self.controller.showPage('Game')\n\n\nclass Game(tk.Frame):\n FrameName = 'Game'\n\n def __init__(self, parent, controller):\n tk.Frame.__init__(self, parent)\n self.controller = controller\n self.configure(bg='white')\n self.Status_Bar = tk.Frame(self, bg='white')\n self.Status_Bar.pack(side='top', fill='x')\n self.Status_Bar.grid_columnconfigure(0, weight=1)\n self.Status_Bar.grid_columnconfigure(1, weight=1)\n self.Status_Bar.grid_columnconfigure(2, weight=1)\n self.Status_Bar.grid_rowconfigure(0, weight=1)\n self.Current_Player = tk.Label(self.Status_Bar, text='None', bg=\n 'white', font=FONTS['medium'])\n self.Current_Player.grid(row=0, column=0)\n self.Game_Type = tk.Label(self.Status_Bar, text='FULL', bg='white',\n font=FONTS['medium'])\n self.Game_Type.grid(row=0, column=1)\n self.Score = tk.Label(self.Status_Bar, text='Black: 2 | 2:White',\n bg='white', font=FONTS['medium'])\n self.Score.grid(row=0, column=2)\n self.Board_Area = tk.Frame(self, bg='#009900')\n self.Board_Area.pack(side='top', fill='both', expand=True)\n self.Board = []\n\n def __GUI_init__(self):\n for y in range(self.controller.Handler.GameParams['y_size']):\n row = []\n for x in range(self.controller.Handler.GameParams['x_size']):\n height = self.Board_Area.winfo_height()\n width = self.Board_Area.winfo_width()\n if height > width:\n diameter = width / self.controller.Handler.GameParams[\n 'x_size']\n else:\n diameter = height / self.controller.Handler.GameParams[\n 'y_size']\n self.Board_Area.grid_columnconfigure(x, weight=1)\n self.Board_Area.grid_rowconfigure(y, weight=1)\n disc = wg.Disc(self.Board_Area, self.controller, diameter=\n diameter, command=lambda x=x, y=y: self.Disc_Function(x, y)\n )\n disc.grid(row=y, column=x, sticky='nsew')\n row.append(disc)\n self.Board.append(row)\n self.Update_Board()\n\n def Reset_Game(self):\n self.Board = []\n for widget in self.Board_Area.winfo_children():\n widget.destroy()\n\n def Disc_Function(self, x: int, y: int):\n if not self.controller.Handler.Move(x + 1, y + 1):\n self.Invalid_Move()\n\n def Invalid_Move(self):\n showerror('Invalid Move', 'You cannot move there!')\n\n def Update_Board(self):\n for y in range(len(self.Board)):\n for x in range(len(self.Board[y])):\n game_piece = self.controller.Handler.Game.Board[y][x]\n if game_piece == 
None:\n pass\n elif game_piece == 'B':\n if self.Board[y][x].Current_Color != 'black':\n self.Board[y][x].Set_Piece_Color('black')\n elif game_piece == 'W':\n if self.Board[y][x].Current_Color != 'white':\n self.Board[y][x].Set_Piece_Color('white')\n\n def Update_Current_Player(self):\n self.Current_Player.config(text='Turn: ' + self.controller.\n Get_Current_Player())\n\n def Update_Game_Type(self):\n g_type = self.controller.Handler.Get_Game_Type()\n self.Game_Type.configure(text='Rules: ' + g_type)\n\n def Update_Score(self):\n b, w = self.controller.Handler.Get_Score()\n self.Score.configure(text='Black: {0!s} | {1!s} :White'.format(b, w))\n\n def Full_Update(self):\n self.Update_Score()\n self.Update_Current_Player()\n self.Update_Board()\n\n\nclass Postgame(tk.Frame):\n FrameName = 'Postgame'\n\n def __init__(self, parent, controller):\n tk.Frame.__init__(self, parent)\n self.controller = controller\n self.configure(bg='white')\n self.Title = tk.Label(self, text='Game Over!', bg='white', font=\n FONTS['large'])\n self.Title.pack(side='top')\n Separator(self, orient='horizontal').pack(side='top', fill='x', padx=10\n )\n self.Winner = tk.Label(self, text='The winner is black-discs.', bg=\n 'white', font=FONTS['medium'])\n self.Winner.pack(side='top')\n self.Buttons = tk.Frame(self, bg='white')\n self.Buttons.pack()\n Replay = tk.Button(self.Buttons, text='Replay', bg='#bbbbbb', font=\n FONTS['medium'], command=lambda : self.Replay())\n Replay.grid(row=0, column=0)\n Quit = tk.Button(self.Buttons, text='Quit', bg='#bbbbbb', font=\n FONTS['medium'], command=lambda : self.Quit())\n Quit.grid(row=0, column=1)\n self.Board_Area = tk.Frame(self, bg='white')\n self.Board_Area.pack(side='bottom')\n self.Score = tk.Label(self.Board_Area, text='', bg='white', font=\n FONTS['medium'])\n self.Score.pack()\n self.Board_Display = tk.Frame(self.Board_Area, bg='green')\n self.Board_Display.pack()\n self.Board = []\n\n def Replay(self):\n self.controller.Replay()\n\n def Quit(self):\n self.controller.destroy()\n exit()\n\n def Update_Board(self):\n for widget in self.Board_Display.winfo_children():\n widget.destroy()\n for y in range(self.controller.Handler.GameParams['y_size']):\n row = []\n for x in range(self.controller.Handler.GameParams['x_size']):\n self.Board_Area.grid_columnconfigure(x, weight=1)\n self.Board_Area.grid_rowconfigure(y, weight=1)\n col = None\n place_col = self.controller.Handler.Game.Board[y][x]\n if place_col == 'B':\n col = 'black'\n elif place_col == 'W':\n col = 'white'\n disc = wg.Disc(self.Board_Display, self.controller, col=col,\n diameter=50)\n disc.grid(row=y, column=x, sticky='nsew')\n row.append(disc)\n self.Board.append(row)\n\n def Update(self):\n winner, scores = self.controller.Handler.Get_Winner()\n if winner.lower() == 'b':\n winner = 'black-discs'\n elif winner.lower() == 'w':\n winner = 'white-discs'\n else:\n winner == 'no one'\n self.Winner.configure(text='The winner is ' + winner)\n self.Score.configure(text='Black: {0!s} | {1!s}:White'.format(\n scores[0], scores[1]))\n self.Update_Board()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Window(tk.Tk):\n <mask token>\n\n def showPage(self, pagename: str):\n page = self.Pages[pagename]\n page.tkraise()\n <mask token>\n\n def Get_Current_Player(self) ->str:\n return self.Handler.Get_Current_Player()\n <mask token>\n\n\nclass Pregame(tk.Frame):\n FrameName = 'Pregame'\n\n def __init__(self, parent, controller):\n tk.Frame.__init__(self, parent)\n self.controller = controller\n self.configure(bg='white')\n self.set_vals = []\n self.__GUI_Reset__()\n\n def __GUI_Reset__(self):\n for widget in self.winfo_children():\n widget.destroy()\n tk.Label(self, text='Otello', font=FONTS['large'], bg='white').pack(\n side='top')\n Separator(self, orient='horizontal').pack(side='top', fill='x', padx=10\n )\n rule_set_frame = tk.Frame(self, bg='white')\n rule_set_frame.pack(pady=10)\n self.rs_label = tk.Label(rule_set_frame, text='Rule Set', font=\n FONTS['medium'], bg='white')\n self.rs_label.pack(side='top')\n self.full_btn = tk.Button(rule_set_frame, text='FULL', font=FONTS[\n 'medium'], bg='#bbbbbb', command=lambda : self.Select_Rule_Set(\n 'full'))\n self.full_btn.pack()\n self.simple_btn = tk.Button(rule_set_frame, text='SIMPLE', font=\n FONTS['medium'], bg='#bbbbbb', command=lambda : self.\n Select_Rule_Set('simple'))\n self.simple_btn.pack()\n row_frame = tk.Frame(self, bg='white')\n row_frame.pack(pady=10)\n self.row_label = tk.Label(row_frame, text='Board Rows', font=FONTS[\n 'medium'], bg='white')\n self.row_label.grid(row=0, column=0, columnspan=7)\n self.Rows_Buttons = []\n place = 0\n for rows in [4, 6, 8, 10, 12, 14, 16]:\n x = tk.Button(row_frame, text=str(rows), font=FONTS['small'],\n bg='#bbbbbb', command=lambda rows=rows: self.Select_Rows(rows))\n x.grid(row=1, column=place)\n self.Rows_Buttons.append(x)\n place += 1\n col_frame = tk.Frame(self, bg='white')\n col_frame.pack(pady=10)\n self.col_label = tk.Label(col_frame, text='Board Columns', font=\n FONTS['medium'], bg='white')\n self.col_label.grid(row=0, column=0, columnspan=7)\n self.Cols_Buttons = []\n place = 0\n for cols in [4, 6, 8, 10, 12, 14, 16]:\n x = tk.Button(col_frame, text=str(cols), font=FONTS['small'],\n bg='#bbbbbb', command=lambda cols=cols: self.Select_Cols(cols))\n x.grid(row=1, column=place)\n self.Cols_Buttons.append(x)\n place += 1\n first_move_frame = tk.Frame(self, bg='white')\n first_move_frame.pack(pady=10)\n self.first_move_label = tk.Label(first_move_frame, text=\n 'First to move', bg='white', font=FONTS['medium'])\n self.first_move_label.grid(row=0, column=0, columnspan=2)\n self.black_btn = tk.Button(first_move_frame, text='Black', bg=\n '#bbbbbb', font=FONTS['medium'], command=lambda : self.\n Select_First_Move('black'))\n self.black_btn.grid(row=1, column=0)\n self.white_btn = tk.Button(first_move_frame, text='White', bg=\n '#bbbbbb', font=FONTS['medium'], command=lambda : self.\n Select_First_Move('white'))\n self.white_btn.grid(row=1, column=1)\n condition_frame = tk.Frame(self, bg='white')\n condition_frame.pack(pady=10)\n self.condition_label = tk.Label(condition_frame, text=\n 'The winner is, the player with..', bg='white', font=FONTS[\n 'medium'])\n self.condition_label.grid(row=0, column=0, columnspan=2)\n self.greater_score = tk.Button(condition_frame, text='more discs.',\n bg='#bbbbbb', font=FONTS['medium'], command=lambda : self.\n Select_Condition('>'))\n self.greater_score.grid(row=1, column=0)\n self.lesser_score = tk.Button(condition_frame, text='less discs.',\n bg='#bbbbbb', font=FONTS['medium'], command=lambda : self.\n 
Select_Condition('<'))\n self.lesser_score.grid(row=1, column=1)\n self.Start_Game_Btn = tk.Button(self, text='Start', bg='#ff2222',\n activebackground='#992222', font=FONTS['medium'])\n self.Start_Game_Btn.pack(side='bottom')\n\n def Select_Rule_Set(self, _set: str):\n if _set == 'simple':\n self.controller.Handler.GameParams['game_type'] = 1\n else:\n self.controller.Handler.GameParams['game_type'] = 2\n self.full_btn.destroy()\n self.simple_btn.destroy()\n self.rs_label.configure(text='Rule Set: ' + _set.upper())\n self.set_vals.append('rules')\n self.Check_Can_Start()\n\n def Select_Rows(self, rows: int):\n self.controller.Handler.GameParams['y_size'] = rows\n for button in self.Rows_Buttons:\n button.destroy()\n self.row_label.configure(text='Board Rows: ' + str(rows))\n self.set_vals.append('rows')\n self.Check_Can_Start()\n\n def Select_Cols(self, cols: int):\n self.controller.Handler.GameParams['x_size'] = cols\n for button in self.Cols_Buttons:\n button.destroy()\n self.col_label.configure(text='Board Columns: ' + str(cols))\n self.set_vals.append('cols')\n self.Check_Can_Start()\n\n def Select_First_Move(self, mover: str):\n if mover == 'black':\n self.controller.Handler.GameParams['first_move'] = 'B'\n else:\n self.controller.Handler.GameParams['first_move'] = 'W'\n self.black_btn.destroy()\n self.white_btn.destroy()\n self.first_move_label.configure(text='First to move: ' + mover)\n self.set_vals.append('move')\n self.Check_Can_Start()\n\n def Select_Condition(self, condition: str):\n self.controller.Handler.GameParams['game_winner'] = condition\n if condition == '>':\n self.condition_label.configure(text=\n 'The winner is, the player with more discs.')\n else:\n self.condition_label.configure(text=\n 'The winner is, the player with less discs.')\n self.lesser_score.destroy()\n self.greater_score.destroy()\n self.set_vals.append('win')\n self.Check_Can_Start()\n\n def Check_Can_Start(self):\n if ('rules' in self.set_vals and 'rows' in self.set_vals and 'cols' in\n self.set_vals and 'move' in self.set_vals and 'win' in self.\n set_vals):\n self.Start_Game_Btn.configure(bg='#22ff22', activebackground=\n '#229922', command=lambda : self.Start_Custom_Board())\n\n def Start_Custom_Board(self):\n self.controller.Pages['Setup_Board'].Setup_Board()\n self.controller.showPage('Setup_Board')\n self.controller.Pages['Setup_Board'].Instructions_Display()\n\n\nclass Custom_Board(tk.Frame):\n FrameName = 'Setup_Board'\n\n def __init__(self, parent, controller):\n tk.Frame.__init__(self, parent)\n self.controller = controller\n self.configure(bg='white')\n self.Title_Frame = tk.Frame(self, bg='white')\n self.Title_Frame.pack(side='top', fill='x')\n tk.Label(self.Title_Frame, text='Create Custom Board', bg='white',\n font=FONTS['medium']).pack(side='left')\n start = tk.Button(self.Title_Frame, text='Play', bg='#22ff22',\n activebackground='#229922', font=FONTS['medium'], command=lambda :\n self.Start())\n start.pack(side='right')\n self.Use_Board = tk.IntVar()\n Use_Board = tk.Checkbutton(self.Title_Frame, text=\n 'Use custom board', font=FONTS['medium'], bg='white',\n activebackground='white', var=self.Use_Board, onvalue=1, offvalue=0\n )\n Use_Board.pack(side='right', padx=10)\n self.Board_Area = tk.Frame(self, bg='#009900')\n self.Board_Area.pack(side='top', fill='both', expand=True)\n self.Board = []\n\n def Setup_Board(self):\n for widget in self.Board_Area.winfo_children():\n widget.destroy()\n self.Board = []\n for y in range(self.controller.Handler.GameParams['y_size']):\n row = []\n 
for x in range(self.controller.Handler.GameParams['x_size']):\n height = self.Board_Area.winfo_height()\n width = self.Board_Area.winfo_width()\n if height > width:\n diameter = width / self.controller.Handler.GameParams[\n 'x_size']\n else:\n diameter = height / self.controller.Handler.GameParams[\n 'y_size']\n self.Board_Area.grid_columnconfigure(x, weight=1)\n self.Board_Area.grid_rowconfigure(y, weight=1)\n disc = wg.Disc(self.Board_Area, self.controller, diameter=\n diameter, mode='setup')\n disc.grid(row=y, column=x, sticky='nsew')\n row.append(disc)\n self.Board.append(row)\n\n def Parse_Board(self) ->list:\n new_board = []\n for row in self.Board:\n new_row = []\n for disc in row:\n if disc.Current_Color == 'white':\n new_row.append('W')\n elif disc.Current_Color == 'black':\n new_row.append('B')\n else:\n new_row.append(None)\n new_board.append(new_row)\n return new_board\n\n def Instructions_Display(self):\n showinfo('How to use',\n 'Click on a tile to cycle between white, black or empty. Check the \"Use Custom Board\" box to use this board!'\n )\n\n def Start(self):\n if self.Use_Board.get():\n self.controller.Handler.GameParams['board'] = self.Parse_Board()\n self.controller.Begin_Game()\n self.controller.Pages['Game'].__GUI_init__()\n self.controller.Pages['Game'].Update_Board()\n self.controller.showPage('Game')\n\n\nclass Game(tk.Frame):\n FrameName = 'Game'\n\n def __init__(self, parent, controller):\n tk.Frame.__init__(self, parent)\n self.controller = controller\n self.configure(bg='white')\n self.Status_Bar = tk.Frame(self, bg='white')\n self.Status_Bar.pack(side='top', fill='x')\n self.Status_Bar.grid_columnconfigure(0, weight=1)\n self.Status_Bar.grid_columnconfigure(1, weight=1)\n self.Status_Bar.grid_columnconfigure(2, weight=1)\n self.Status_Bar.grid_rowconfigure(0, weight=1)\n self.Current_Player = tk.Label(self.Status_Bar, text='None', bg=\n 'white', font=FONTS['medium'])\n self.Current_Player.grid(row=0, column=0)\n self.Game_Type = tk.Label(self.Status_Bar, text='FULL', bg='white',\n font=FONTS['medium'])\n self.Game_Type.grid(row=0, column=1)\n self.Score = tk.Label(self.Status_Bar, text='Black: 2 | 2:White',\n bg='white', font=FONTS['medium'])\n self.Score.grid(row=0, column=2)\n self.Board_Area = tk.Frame(self, bg='#009900')\n self.Board_Area.pack(side='top', fill='both', expand=True)\n self.Board = []\n\n def __GUI_init__(self):\n for y in range(self.controller.Handler.GameParams['y_size']):\n row = []\n for x in range(self.controller.Handler.GameParams['x_size']):\n height = self.Board_Area.winfo_height()\n width = self.Board_Area.winfo_width()\n if height > width:\n diameter = width / self.controller.Handler.GameParams[\n 'x_size']\n else:\n diameter = height / self.controller.Handler.GameParams[\n 'y_size']\n self.Board_Area.grid_columnconfigure(x, weight=1)\n self.Board_Area.grid_rowconfigure(y, weight=1)\n disc = wg.Disc(self.Board_Area, self.controller, diameter=\n diameter, command=lambda x=x, y=y: self.Disc_Function(x, y)\n )\n disc.grid(row=y, column=x, sticky='nsew')\n row.append(disc)\n self.Board.append(row)\n self.Update_Board()\n\n def Reset_Game(self):\n self.Board = []\n for widget in self.Board_Area.winfo_children():\n widget.destroy()\n\n def Disc_Function(self, x: int, y: int):\n if not self.controller.Handler.Move(x + 1, y + 1):\n self.Invalid_Move()\n\n def Invalid_Move(self):\n showerror('Invalid Move', 'You cannot move there!')\n\n def Update_Board(self):\n for y in range(len(self.Board)):\n for x in range(len(self.Board[y])):\n 
game_piece = self.controller.Handler.Game.Board[y][x]\n if game_piece == None:\n pass\n elif game_piece == 'B':\n if self.Board[y][x].Current_Color != 'black':\n self.Board[y][x].Set_Piece_Color('black')\n elif game_piece == 'W':\n if self.Board[y][x].Current_Color != 'white':\n self.Board[y][x].Set_Piece_Color('white')\n\n def Update_Current_Player(self):\n self.Current_Player.config(text='Turn: ' + self.controller.\n Get_Current_Player())\n\n def Update_Game_Type(self):\n g_type = self.controller.Handler.Get_Game_Type()\n self.Game_Type.configure(text='Rules: ' + g_type)\n\n def Update_Score(self):\n b, w = self.controller.Handler.Get_Score()\n self.Score.configure(text='Black: {0!s} | {1!s} :White'.format(b, w))\n\n def Full_Update(self):\n self.Update_Score()\n self.Update_Current_Player()\n self.Update_Board()\n\n\nclass Postgame(tk.Frame):\n FrameName = 'Postgame'\n\n def __init__(self, parent, controller):\n tk.Frame.__init__(self, parent)\n self.controller = controller\n self.configure(bg='white')\n self.Title = tk.Label(self, text='Game Over!', bg='white', font=\n FONTS['large'])\n self.Title.pack(side='top')\n Separator(self, orient='horizontal').pack(side='top', fill='x', padx=10\n )\n self.Winner = tk.Label(self, text='The winner is black-discs.', bg=\n 'white', font=FONTS['medium'])\n self.Winner.pack(side='top')\n self.Buttons = tk.Frame(self, bg='white')\n self.Buttons.pack()\n Replay = tk.Button(self.Buttons, text='Replay', bg='#bbbbbb', font=\n FONTS['medium'], command=lambda : self.Replay())\n Replay.grid(row=0, column=0)\n Quit = tk.Button(self.Buttons, text='Quit', bg='#bbbbbb', font=\n FONTS['medium'], command=lambda : self.Quit())\n Quit.grid(row=0, column=1)\n self.Board_Area = tk.Frame(self, bg='white')\n self.Board_Area.pack(side='bottom')\n self.Score = tk.Label(self.Board_Area, text='', bg='white', font=\n FONTS['medium'])\n self.Score.pack()\n self.Board_Display = tk.Frame(self.Board_Area, bg='green')\n self.Board_Display.pack()\n self.Board = []\n\n def Replay(self):\n self.controller.Replay()\n\n def Quit(self):\n self.controller.destroy()\n exit()\n\n def Update_Board(self):\n for widget in self.Board_Display.winfo_children():\n widget.destroy()\n for y in range(self.controller.Handler.GameParams['y_size']):\n row = []\n for x in range(self.controller.Handler.GameParams['x_size']):\n self.Board_Area.grid_columnconfigure(x, weight=1)\n self.Board_Area.grid_rowconfigure(y, weight=1)\n col = None\n place_col = self.controller.Handler.Game.Board[y][x]\n if place_col == 'B':\n col = 'black'\n elif place_col == 'W':\n col = 'white'\n disc = wg.Disc(self.Board_Display, self.controller, col=col,\n diameter=50)\n disc.grid(row=y, column=x, sticky='nsew')\n row.append(disc)\n self.Board.append(row)\n\n def Update(self):\n winner, scores = self.controller.Handler.Get_Winner()\n if winner.lower() == 'b':\n winner = 'black-discs'\n elif winner.lower() == 'w':\n winner = 'white-discs'\n else:\n winner == 'no one'\n self.Winner.configure(text='The winner is ' + winner)\n self.Score.configure(text='Black: {0!s} | {1!s}:White'.format(\n scores[0], scores[1]))\n self.Update_Board()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Window(tk.Tk):\n\n def __init__(self, controller, *args, **kwargs):\n tk.Tk.__init__(self, *args, **kwargs)\n self.Handler = controller\n self.title('Othello')\n try:\n self.iconbitmap('Icon.ico')\n except:\n pass\n self.minsize(600, 600)\n self.container = tk.Frame(self)\n self.container.pack(side='top', fill='both', expand=True)\n self.container.grid_rowconfigure(0, weight=1)\n self.container.grid_columnconfigure(0, weight=1)\n self.Pages = {}\n for page in (Pregame, Custom_Board, Game, Postgame):\n new = page(self.container, self)\n self.Pages[page.FrameName] = new\n new.grid(row=0, column=0, sticky='nsew')\n self.showPage('Pregame')\n\n def showPage(self, pagename: str):\n page = self.Pages[pagename]\n page.tkraise()\n <mask token>\n\n def Get_Current_Player(self) ->str:\n return self.Handler.Get_Current_Player()\n <mask token>\n\n\nclass Pregame(tk.Frame):\n FrameName = 'Pregame'\n\n def __init__(self, parent, controller):\n tk.Frame.__init__(self, parent)\n self.controller = controller\n self.configure(bg='white')\n self.set_vals = []\n self.__GUI_Reset__()\n\n def __GUI_Reset__(self):\n for widget in self.winfo_children():\n widget.destroy()\n tk.Label(self, text='Otello', font=FONTS['large'], bg='white').pack(\n side='top')\n Separator(self, orient='horizontal').pack(side='top', fill='x', padx=10\n )\n rule_set_frame = tk.Frame(self, bg='white')\n rule_set_frame.pack(pady=10)\n self.rs_label = tk.Label(rule_set_frame, text='Rule Set', font=\n FONTS['medium'], bg='white')\n self.rs_label.pack(side='top')\n self.full_btn = tk.Button(rule_set_frame, text='FULL', font=FONTS[\n 'medium'], bg='#bbbbbb', command=lambda : self.Select_Rule_Set(\n 'full'))\n self.full_btn.pack()\n self.simple_btn = tk.Button(rule_set_frame, text='SIMPLE', font=\n FONTS['medium'], bg='#bbbbbb', command=lambda : self.\n Select_Rule_Set('simple'))\n self.simple_btn.pack()\n row_frame = tk.Frame(self, bg='white')\n row_frame.pack(pady=10)\n self.row_label = tk.Label(row_frame, text='Board Rows', font=FONTS[\n 'medium'], bg='white')\n self.row_label.grid(row=0, column=0, columnspan=7)\n self.Rows_Buttons = []\n place = 0\n for rows in [4, 6, 8, 10, 12, 14, 16]:\n x = tk.Button(row_frame, text=str(rows), font=FONTS['small'],\n bg='#bbbbbb', command=lambda rows=rows: self.Select_Rows(rows))\n x.grid(row=1, column=place)\n self.Rows_Buttons.append(x)\n place += 1\n col_frame = tk.Frame(self, bg='white')\n col_frame.pack(pady=10)\n self.col_label = tk.Label(col_frame, text='Board Columns', font=\n FONTS['medium'], bg='white')\n self.col_label.grid(row=0, column=0, columnspan=7)\n self.Cols_Buttons = []\n place = 0\n for cols in [4, 6, 8, 10, 12, 14, 16]:\n x = tk.Button(col_frame, text=str(cols), font=FONTS['small'],\n bg='#bbbbbb', command=lambda cols=cols: self.Select_Cols(cols))\n x.grid(row=1, column=place)\n self.Cols_Buttons.append(x)\n place += 1\n first_move_frame = tk.Frame(self, bg='white')\n first_move_frame.pack(pady=10)\n self.first_move_label = tk.Label(first_move_frame, text=\n 'First to move', bg='white', font=FONTS['medium'])\n self.first_move_label.grid(row=0, column=0, columnspan=2)\n self.black_btn = tk.Button(first_move_frame, text='Black', bg=\n '#bbbbbb', font=FONTS['medium'], command=lambda : self.\n Select_First_Move('black'))\n self.black_btn.grid(row=1, column=0)\n self.white_btn = tk.Button(first_move_frame, text='White', bg=\n '#bbbbbb', font=FONTS['medium'], command=lambda : self.\n Select_First_Move('white'))\n self.white_btn.grid(row=1, column=1)\n 
condition_frame = tk.Frame(self, bg='white')\n condition_frame.pack(pady=10)\n self.condition_label = tk.Label(condition_frame, text=\n 'The winner is, the player with..', bg='white', font=FONTS[\n 'medium'])\n self.condition_label.grid(row=0, column=0, columnspan=2)\n self.greater_score = tk.Button(condition_frame, text='more discs.',\n bg='#bbbbbb', font=FONTS['medium'], command=lambda : self.\n Select_Condition('>'))\n self.greater_score.grid(row=1, column=0)\n self.lesser_score = tk.Button(condition_frame, text='less discs.',\n bg='#bbbbbb', font=FONTS['medium'], command=lambda : self.\n Select_Condition('<'))\n self.lesser_score.grid(row=1, column=1)\n self.Start_Game_Btn = tk.Button(self, text='Start', bg='#ff2222',\n activebackground='#992222', font=FONTS['medium'])\n self.Start_Game_Btn.pack(side='bottom')\n\n def Select_Rule_Set(self, _set: str):\n if _set == 'simple':\n self.controller.Handler.GameParams['game_type'] = 1\n else:\n self.controller.Handler.GameParams['game_type'] = 2\n self.full_btn.destroy()\n self.simple_btn.destroy()\n self.rs_label.configure(text='Rule Set: ' + _set.upper())\n self.set_vals.append('rules')\n self.Check_Can_Start()\n\n def Select_Rows(self, rows: int):\n self.controller.Handler.GameParams['y_size'] = rows\n for button in self.Rows_Buttons:\n button.destroy()\n self.row_label.configure(text='Board Rows: ' + str(rows))\n self.set_vals.append('rows')\n self.Check_Can_Start()\n\n def Select_Cols(self, cols: int):\n self.controller.Handler.GameParams['x_size'] = cols\n for button in self.Cols_Buttons:\n button.destroy()\n self.col_label.configure(text='Board Columns: ' + str(cols))\n self.set_vals.append('cols')\n self.Check_Can_Start()\n\n def Select_First_Move(self, mover: str):\n if mover == 'black':\n self.controller.Handler.GameParams['first_move'] = 'B'\n else:\n self.controller.Handler.GameParams['first_move'] = 'W'\n self.black_btn.destroy()\n self.white_btn.destroy()\n self.first_move_label.configure(text='First to move: ' + mover)\n self.set_vals.append('move')\n self.Check_Can_Start()\n\n def Select_Condition(self, condition: str):\n self.controller.Handler.GameParams['game_winner'] = condition\n if condition == '>':\n self.condition_label.configure(text=\n 'The winner is, the player with more discs.')\n else:\n self.condition_label.configure(text=\n 'The winner is, the player with less discs.')\n self.lesser_score.destroy()\n self.greater_score.destroy()\n self.set_vals.append('win')\n self.Check_Can_Start()\n\n def Check_Can_Start(self):\n if ('rules' in self.set_vals and 'rows' in self.set_vals and 'cols' in\n self.set_vals and 'move' in self.set_vals and 'win' in self.\n set_vals):\n self.Start_Game_Btn.configure(bg='#22ff22', activebackground=\n '#229922', command=lambda : self.Start_Custom_Board())\n\n def Start_Custom_Board(self):\n self.controller.Pages['Setup_Board'].Setup_Board()\n self.controller.showPage('Setup_Board')\n self.controller.Pages['Setup_Board'].Instructions_Display()\n\n\nclass Custom_Board(tk.Frame):\n FrameName = 'Setup_Board'\n\n def __init__(self, parent, controller):\n tk.Frame.__init__(self, parent)\n self.controller = controller\n self.configure(bg='white')\n self.Title_Frame = tk.Frame(self, bg='white')\n self.Title_Frame.pack(side='top', fill='x')\n tk.Label(self.Title_Frame, text='Create Custom Board', bg='white',\n font=FONTS['medium']).pack(side='left')\n start = tk.Button(self.Title_Frame, text='Play', bg='#22ff22',\n activebackground='#229922', font=FONTS['medium'], command=lambda :\n self.Start())\n 
start.pack(side='right')\n self.Use_Board = tk.IntVar()\n Use_Board = tk.Checkbutton(self.Title_Frame, text=\n 'Use custom board', font=FONTS['medium'], bg='white',\n activebackground='white', var=self.Use_Board, onvalue=1, offvalue=0\n )\n Use_Board.pack(side='right', padx=10)\n self.Board_Area = tk.Frame(self, bg='#009900')\n self.Board_Area.pack(side='top', fill='both', expand=True)\n self.Board = []\n\n def Setup_Board(self):\n for widget in self.Board_Area.winfo_children():\n widget.destroy()\n self.Board = []\n for y in range(self.controller.Handler.GameParams['y_size']):\n row = []\n for x in range(self.controller.Handler.GameParams['x_size']):\n height = self.Board_Area.winfo_height()\n width = self.Board_Area.winfo_width()\n if height > width:\n diameter = width / self.controller.Handler.GameParams[\n 'x_size']\n else:\n diameter = height / self.controller.Handler.GameParams[\n 'y_size']\n self.Board_Area.grid_columnconfigure(x, weight=1)\n self.Board_Area.grid_rowconfigure(y, weight=1)\n disc = wg.Disc(self.Board_Area, self.controller, diameter=\n diameter, mode='setup')\n disc.grid(row=y, column=x, sticky='nsew')\n row.append(disc)\n self.Board.append(row)\n\n def Parse_Board(self) ->list:\n new_board = []\n for row in self.Board:\n new_row = []\n for disc in row:\n if disc.Current_Color == 'white':\n new_row.append('W')\n elif disc.Current_Color == 'black':\n new_row.append('B')\n else:\n new_row.append(None)\n new_board.append(new_row)\n return new_board\n\n def Instructions_Display(self):\n showinfo('How to use',\n 'Click on a tile to cycle between white, black or empty. Check the \"Use Custom Board\" box to use this board!'\n )\n\n def Start(self):\n if self.Use_Board.get():\n self.controller.Handler.GameParams['board'] = self.Parse_Board()\n self.controller.Begin_Game()\n self.controller.Pages['Game'].__GUI_init__()\n self.controller.Pages['Game'].Update_Board()\n self.controller.showPage('Game')\n\n\nclass Game(tk.Frame):\n FrameName = 'Game'\n\n def __init__(self, parent, controller):\n tk.Frame.__init__(self, parent)\n self.controller = controller\n self.configure(bg='white')\n self.Status_Bar = tk.Frame(self, bg='white')\n self.Status_Bar.pack(side='top', fill='x')\n self.Status_Bar.grid_columnconfigure(0, weight=1)\n self.Status_Bar.grid_columnconfigure(1, weight=1)\n self.Status_Bar.grid_columnconfigure(2, weight=1)\n self.Status_Bar.grid_rowconfigure(0, weight=1)\n self.Current_Player = tk.Label(self.Status_Bar, text='None', bg=\n 'white', font=FONTS['medium'])\n self.Current_Player.grid(row=0, column=0)\n self.Game_Type = tk.Label(self.Status_Bar, text='FULL', bg='white',\n font=FONTS['medium'])\n self.Game_Type.grid(row=0, column=1)\n self.Score = tk.Label(self.Status_Bar, text='Black: 2 | 2:White',\n bg='white', font=FONTS['medium'])\n self.Score.grid(row=0, column=2)\n self.Board_Area = tk.Frame(self, bg='#009900')\n self.Board_Area.pack(side='top', fill='both', expand=True)\n self.Board = []\n\n def __GUI_init__(self):\n for y in range(self.controller.Handler.GameParams['y_size']):\n row = []\n for x in range(self.controller.Handler.GameParams['x_size']):\n height = self.Board_Area.winfo_height()\n width = self.Board_Area.winfo_width()\n if height > width:\n diameter = width / self.controller.Handler.GameParams[\n 'x_size']\n else:\n diameter = height / self.controller.Handler.GameParams[\n 'y_size']\n self.Board_Area.grid_columnconfigure(x, weight=1)\n self.Board_Area.grid_rowconfigure(y, weight=1)\n disc = wg.Disc(self.Board_Area, self.controller, diameter=\n 
diameter, command=lambda x=x, y=y: self.Disc_Function(x, y)\n )\n disc.grid(row=y, column=x, sticky='nsew')\n row.append(disc)\n self.Board.append(row)\n self.Update_Board()\n\n def Reset_Game(self):\n self.Board = []\n for widget in self.Board_Area.winfo_children():\n widget.destroy()\n\n def Disc_Function(self, x: int, y: int):\n if not self.controller.Handler.Move(x + 1, y + 1):\n self.Invalid_Move()\n\n def Invalid_Move(self):\n showerror('Invalid Move', 'You cannot move there!')\n\n def Update_Board(self):\n for y in range(len(self.Board)):\n for x in range(len(self.Board[y])):\n game_piece = self.controller.Handler.Game.Board[y][x]\n if game_piece == None:\n pass\n elif game_piece == 'B':\n if self.Board[y][x].Current_Color != 'black':\n self.Board[y][x].Set_Piece_Color('black')\n elif game_piece == 'W':\n if self.Board[y][x].Current_Color != 'white':\n self.Board[y][x].Set_Piece_Color('white')\n\n def Update_Current_Player(self):\n self.Current_Player.config(text='Turn: ' + self.controller.\n Get_Current_Player())\n\n def Update_Game_Type(self):\n g_type = self.controller.Handler.Get_Game_Type()\n self.Game_Type.configure(text='Rules: ' + g_type)\n\n def Update_Score(self):\n b, w = self.controller.Handler.Get_Score()\n self.Score.configure(text='Black: {0!s} | {1!s} :White'.format(b, w))\n\n def Full_Update(self):\n self.Update_Score()\n self.Update_Current_Player()\n self.Update_Board()\n\n\nclass Postgame(tk.Frame):\n FrameName = 'Postgame'\n\n def __init__(self, parent, controller):\n tk.Frame.__init__(self, parent)\n self.controller = controller\n self.configure(bg='white')\n self.Title = tk.Label(self, text='Game Over!', bg='white', font=\n FONTS['large'])\n self.Title.pack(side='top')\n Separator(self, orient='horizontal').pack(side='top', fill='x', padx=10\n )\n self.Winner = tk.Label(self, text='The winner is black-discs.', bg=\n 'white', font=FONTS['medium'])\n self.Winner.pack(side='top')\n self.Buttons = tk.Frame(self, bg='white')\n self.Buttons.pack()\n Replay = tk.Button(self.Buttons, text='Replay', bg='#bbbbbb', font=\n FONTS['medium'], command=lambda : self.Replay())\n Replay.grid(row=0, column=0)\n Quit = tk.Button(self.Buttons, text='Quit', bg='#bbbbbb', font=\n FONTS['medium'], command=lambda : self.Quit())\n Quit.grid(row=0, column=1)\n self.Board_Area = tk.Frame(self, bg='white')\n self.Board_Area.pack(side='bottom')\n self.Score = tk.Label(self.Board_Area, text='', bg='white', font=\n FONTS['medium'])\n self.Score.pack()\n self.Board_Display = tk.Frame(self.Board_Area, bg='green')\n self.Board_Display.pack()\n self.Board = []\n\n def Replay(self):\n self.controller.Replay()\n\n def Quit(self):\n self.controller.destroy()\n exit()\n\n def Update_Board(self):\n for widget in self.Board_Display.winfo_children():\n widget.destroy()\n for y in range(self.controller.Handler.GameParams['y_size']):\n row = []\n for x in range(self.controller.Handler.GameParams['x_size']):\n self.Board_Area.grid_columnconfigure(x, weight=1)\n self.Board_Area.grid_rowconfigure(y, weight=1)\n col = None\n place_col = self.controller.Handler.Game.Board[y][x]\n if place_col == 'B':\n col = 'black'\n elif place_col == 'W':\n col = 'white'\n disc = wg.Disc(self.Board_Display, self.controller, col=col,\n diameter=50)\n disc.grid(row=y, column=x, sticky='nsew')\n row.append(disc)\n self.Board.append(row)\n\n def Update(self):\n winner, scores = self.controller.Handler.Get_Winner()\n if winner.lower() == 'b':\n winner = 'black-discs'\n elif winner.lower() == 'w':\n winner = 'white-discs'\n 
else:\n winner == 'no one'\n self.Winner.configure(text='The winner is ' + winner)\n self.Score.configure(text='Black: {0!s} | {1!s}:White'.format(\n scores[0], scores[1]))\n self.Update_Board()\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass Handler:\n\n def __init__(self):\n self.Game = None\n self.GameParams = {}\n self.Window = Window(self)\n self.Window.mainloop()\n <mask token>\n\n def Is_Running(self):\n return self.Game.Running\n\n def Start_Game(self):\n self.Game = lgc.Game(**self.GameParams)\n self.Game.Start_Game()\n self.Update_Game()\n self.Window.Pages['Game'].Update_Game_Type()\n\n def Get_Current_Player(self) ->str:\n if self.Game.Running:\n if self.Game.Current_Player == 'B':\n return 'black'\n else:\n return 'white'\n else:\n return 'None'\n\n def Get_Game_Type(self) ->str:\n g = self.Game.Game_Type\n if g == 1:\n return 'SIMPLE'\n else:\n return 'FULL'\n <mask token>\n\n def Move(self, x: int, y: int) ->bool:\n complete = self.Game.Next_Move(x, y)\n if complete:\n self.Update_Game()\n self.Game_Complete_Check()\n return True\n self.Update_Game()\n self.Game_Complete_Check()\n return False\n\n def Get_Winner(self) ->tuple:\n return self.Game.Check_Winner()\n\n def Game_Complete_Check(self):\n if self.Is_Running() == False:\n self.Window.showPage('Postgame')\n self.Window.Pages['Postgame'].Update()\n <mask token>\n\n\nclass Window(tk.Tk):\n\n def __init__(self, controller, *args, **kwargs):\n tk.Tk.__init__(self, *args, **kwargs)\n self.Handler = controller\n self.title('Othello')\n try:\n self.iconbitmap('Icon.ico')\n except:\n pass\n self.minsize(600, 600)\n self.container = tk.Frame(self)\n self.container.pack(side='top', fill='both', expand=True)\n self.container.grid_rowconfigure(0, weight=1)\n self.container.grid_columnconfigure(0, weight=1)\n self.Pages = {}\n for page in (Pregame, Custom_Board, Game, Postgame):\n new = page(self.container, self)\n self.Pages[page.FrameName] = new\n new.grid(row=0, column=0, sticky='nsew')\n self.showPage('Pregame')\n\n def showPage(self, pagename: str):\n page = self.Pages[pagename]\n page.tkraise()\n\n def Begin_Game(self):\n self.Handler.Start_Game()\n\n def Get_Current_Player(self) ->str:\n return self.Handler.Get_Current_Player()\n\n def Replay(self):\n self.Pages['Pregame'].__GUI_Reset__()\n self.Pages['Game'].Reset_Game()\n self.Handler.Replay()\n self.showPage('Pregame')\n\n\nclass Pregame(tk.Frame):\n FrameName = 'Pregame'\n\n def __init__(self, parent, controller):\n tk.Frame.__init__(self, parent)\n self.controller = controller\n self.configure(bg='white')\n self.set_vals = []\n self.__GUI_Reset__()\n\n def __GUI_Reset__(self):\n for widget in self.winfo_children():\n widget.destroy()\n tk.Label(self, text='Otello', font=FONTS['large'], bg='white').pack(\n side='top')\n Separator(self, orient='horizontal').pack(side='top', fill='x', padx=10\n )\n rule_set_frame = tk.Frame(self, bg='white')\n rule_set_frame.pack(pady=10)\n self.rs_label = tk.Label(rule_set_frame, text='Rule Set', font=\n FONTS['medium'], bg='white')\n self.rs_label.pack(side='top')\n self.full_btn = tk.Button(rule_set_frame, text='FULL', font=FONTS[\n 'medium'], bg='#bbbbbb', command=lambda : self.Select_Rule_Set(\n 'full'))\n self.full_btn.pack()\n self.simple_btn = tk.Button(rule_set_frame, text='SIMPLE', font=\n FONTS['medium'], bg='#bbbbbb', command=lambda : self.\n Select_Rule_Set('simple'))\n self.simple_btn.pack()\n row_frame = tk.Frame(self, bg='white')\n row_frame.pack(pady=10)\n self.row_label = tk.Label(row_frame, text='Board Rows', font=FONTS[\n 'medium'], bg='white')\n self.row_label.grid(row=0, column=0, columnspan=7)\n self.Rows_Buttons = []\n place = 0\n for rows in [4, 6, 8, 10, 12, 14, 16]:\n x = tk.Button(row_frame, text=str(rows), 
font=FONTS['small'],\n bg='#bbbbbb', command=lambda rows=rows: self.Select_Rows(rows))\n x.grid(row=1, column=place)\n self.Rows_Buttons.append(x)\n place += 1\n col_frame = tk.Frame(self, bg='white')\n col_frame.pack(pady=10)\n self.col_label = tk.Label(col_frame, text='Board Columns', font=\n FONTS['medium'], bg='white')\n self.col_label.grid(row=0, column=0, columnspan=7)\n self.Cols_Buttons = []\n place = 0\n for cols in [4, 6, 8, 10, 12, 14, 16]:\n x = tk.Button(col_frame, text=str(cols), font=FONTS['small'],\n bg='#bbbbbb', command=lambda cols=cols: self.Select_Cols(cols))\n x.grid(row=1, column=place)\n self.Cols_Buttons.append(x)\n place += 1\n first_move_frame = tk.Frame(self, bg='white')\n first_move_frame.pack(pady=10)\n self.first_move_label = tk.Label(first_move_frame, text=\n 'First to move', bg='white', font=FONTS['medium'])\n self.first_move_label.grid(row=0, column=0, columnspan=2)\n self.black_btn = tk.Button(first_move_frame, text='Black', bg=\n '#bbbbbb', font=FONTS['medium'], command=lambda : self.\n Select_First_Move('black'))\n self.black_btn.grid(row=1, column=0)\n self.white_btn = tk.Button(first_move_frame, text='White', bg=\n '#bbbbbb', font=FONTS['medium'], command=lambda : self.\n Select_First_Move('white'))\n self.white_btn.grid(row=1, column=1)\n condition_frame = tk.Frame(self, bg='white')\n condition_frame.pack(pady=10)\n self.condition_label = tk.Label(condition_frame, text=\n 'The winner is, the player with..', bg='white', font=FONTS[\n 'medium'])\n self.condition_label.grid(row=0, column=0, columnspan=2)\n self.greater_score = tk.Button(condition_frame, text='more discs.',\n bg='#bbbbbb', font=FONTS['medium'], command=lambda : self.\n Select_Condition('>'))\n self.greater_score.grid(row=1, column=0)\n self.lesser_score = tk.Button(condition_frame, text='less discs.',\n bg='#bbbbbb', font=FONTS['medium'], command=lambda : self.\n Select_Condition('<'))\n self.lesser_score.grid(row=1, column=1)\n self.Start_Game_Btn = tk.Button(self, text='Start', bg='#ff2222',\n activebackground='#992222', font=FONTS['medium'])\n self.Start_Game_Btn.pack(side='bottom')\n\n def Select_Rule_Set(self, _set: str):\n if _set == 'simple':\n self.controller.Handler.GameParams['game_type'] = 1\n else:\n self.controller.Handler.GameParams['game_type'] = 2\n self.full_btn.destroy()\n self.simple_btn.destroy()\n self.rs_label.configure(text='Rule Set: ' + _set.upper())\n self.set_vals.append('rules')\n self.Check_Can_Start()\n\n def Select_Rows(self, rows: int):\n self.controller.Handler.GameParams['y_size'] = rows\n for button in self.Rows_Buttons:\n button.destroy()\n self.row_label.configure(text='Board Rows: ' + str(rows))\n self.set_vals.append('rows')\n self.Check_Can_Start()\n\n def Select_Cols(self, cols: int):\n self.controller.Handler.GameParams['x_size'] = cols\n for button in self.Cols_Buttons:\n button.destroy()\n self.col_label.configure(text='Board Columns: ' + str(cols))\n self.set_vals.append('cols')\n self.Check_Can_Start()\n\n def Select_First_Move(self, mover: str):\n if mover == 'black':\n self.controller.Handler.GameParams['first_move'] = 'B'\n else:\n self.controller.Handler.GameParams['first_move'] = 'W'\n self.black_btn.destroy()\n self.white_btn.destroy()\n self.first_move_label.configure(text='First to move: ' + mover)\n self.set_vals.append('move')\n self.Check_Can_Start()\n\n def Select_Condition(self, condition: str):\n self.controller.Handler.GameParams['game_winner'] = condition\n if condition == '>':\n self.condition_label.configure(text=\n 'The 
winner is, the player with more discs.')\n else:\n self.condition_label.configure(text=\n 'The winner is, the player with less discs.')\n self.lesser_score.destroy()\n self.greater_score.destroy()\n self.set_vals.append('win')\n self.Check_Can_Start()\n\n def Check_Can_Start(self):\n if ('rules' in self.set_vals and 'rows' in self.set_vals and 'cols' in\n self.set_vals and 'move' in self.set_vals and 'win' in self.\n set_vals):\n self.Start_Game_Btn.configure(bg='#22ff22', activebackground=\n '#229922', command=lambda : self.Start_Custom_Board())\n\n def Start_Custom_Board(self):\n self.controller.Pages['Setup_Board'].Setup_Board()\n self.controller.showPage('Setup_Board')\n self.controller.Pages['Setup_Board'].Instructions_Display()\n\n\nclass Custom_Board(tk.Frame):\n FrameName = 'Setup_Board'\n\n def __init__(self, parent, controller):\n tk.Frame.__init__(self, parent)\n self.controller = controller\n self.configure(bg='white')\n self.Title_Frame = tk.Frame(self, bg='white')\n self.Title_Frame.pack(side='top', fill='x')\n tk.Label(self.Title_Frame, text='Create Custom Board', bg='white',\n font=FONTS['medium']).pack(side='left')\n start = tk.Button(self.Title_Frame, text='Play', bg='#22ff22',\n activebackground='#229922', font=FONTS['medium'], command=lambda :\n self.Start())\n start.pack(side='right')\n self.Use_Board = tk.IntVar()\n Use_Board = tk.Checkbutton(self.Title_Frame, text=\n 'Use custom board', font=FONTS['medium'], bg='white',\n activebackground='white', var=self.Use_Board, onvalue=1, offvalue=0\n )\n Use_Board.pack(side='right', padx=10)\n self.Board_Area = tk.Frame(self, bg='#009900')\n self.Board_Area.pack(side='top', fill='both', expand=True)\n self.Board = []\n\n def Setup_Board(self):\n for widget in self.Board_Area.winfo_children():\n widget.destroy()\n self.Board = []\n for y in range(self.controller.Handler.GameParams['y_size']):\n row = []\n for x in range(self.controller.Handler.GameParams['x_size']):\n height = self.Board_Area.winfo_height()\n width = self.Board_Area.winfo_width()\n if height > width:\n diameter = width / self.controller.Handler.GameParams[\n 'x_size']\n else:\n diameter = height / self.controller.Handler.GameParams[\n 'y_size']\n self.Board_Area.grid_columnconfigure(x, weight=1)\n self.Board_Area.grid_rowconfigure(y, weight=1)\n disc = wg.Disc(self.Board_Area, self.controller, diameter=\n diameter, mode='setup')\n disc.grid(row=y, column=x, sticky='nsew')\n row.append(disc)\n self.Board.append(row)\n\n def Parse_Board(self) ->list:\n new_board = []\n for row in self.Board:\n new_row = []\n for disc in row:\n if disc.Current_Color == 'white':\n new_row.append('W')\n elif disc.Current_Color == 'black':\n new_row.append('B')\n else:\n new_row.append(None)\n new_board.append(new_row)\n return new_board\n\n def Instructions_Display(self):\n showinfo('How to use',\n 'Click on a tile to cycle between white, black or empty. 
Check the \"Use Custom Board\" box to use this board!'\n )\n\n def Start(self):\n if self.Use_Board.get():\n self.controller.Handler.GameParams['board'] = self.Parse_Board()\n self.controller.Begin_Game()\n self.controller.Pages['Game'].__GUI_init__()\n self.controller.Pages['Game'].Update_Board()\n self.controller.showPage('Game')\n\n\nclass Game(tk.Frame):\n FrameName = 'Game'\n\n def __init__(self, parent, controller):\n tk.Frame.__init__(self, parent)\n self.controller = controller\n self.configure(bg='white')\n self.Status_Bar = tk.Frame(self, bg='white')\n self.Status_Bar.pack(side='top', fill='x')\n self.Status_Bar.grid_columnconfigure(0, weight=1)\n self.Status_Bar.grid_columnconfigure(1, weight=1)\n self.Status_Bar.grid_columnconfigure(2, weight=1)\n self.Status_Bar.grid_rowconfigure(0, weight=1)\n self.Current_Player = tk.Label(self.Status_Bar, text='None', bg=\n 'white', font=FONTS['medium'])\n self.Current_Player.grid(row=0, column=0)\n self.Game_Type = tk.Label(self.Status_Bar, text='FULL', bg='white',\n font=FONTS['medium'])\n self.Game_Type.grid(row=0, column=1)\n self.Score = tk.Label(self.Status_Bar, text='Black: 2 | 2:White',\n bg='white', font=FONTS['medium'])\n self.Score.grid(row=0, column=2)\n self.Board_Area = tk.Frame(self, bg='#009900')\n self.Board_Area.pack(side='top', fill='both', expand=True)\n self.Board = []\n\n def __GUI_init__(self):\n for y in range(self.controller.Handler.GameParams['y_size']):\n row = []\n for x in range(self.controller.Handler.GameParams['x_size']):\n height = self.Board_Area.winfo_height()\n width = self.Board_Area.winfo_width()\n if height > width:\n diameter = width / self.controller.Handler.GameParams[\n 'x_size']\n else:\n diameter = height / self.controller.Handler.GameParams[\n 'y_size']\n self.Board_Area.grid_columnconfigure(x, weight=1)\n self.Board_Area.grid_rowconfigure(y, weight=1)\n disc = wg.Disc(self.Board_Area, self.controller, diameter=\n diameter, command=lambda x=x, y=y: self.Disc_Function(x, y)\n )\n disc.grid(row=y, column=x, sticky='nsew')\n row.append(disc)\n self.Board.append(row)\n self.Update_Board()\n\n def Reset_Game(self):\n self.Board = []\n for widget in self.Board_Area.winfo_children():\n widget.destroy()\n\n def Disc_Function(self, x: int, y: int):\n if not self.controller.Handler.Move(x + 1, y + 1):\n self.Invalid_Move()\n\n def Invalid_Move(self):\n showerror('Invalid Move', 'You cannot move there!')\n\n def Update_Board(self):\n for y in range(len(self.Board)):\n for x in range(len(self.Board[y])):\n game_piece = self.controller.Handler.Game.Board[y][x]\n if game_piece == None:\n pass\n elif game_piece == 'B':\n if self.Board[y][x].Current_Color != 'black':\n self.Board[y][x].Set_Piece_Color('black')\n elif game_piece == 'W':\n if self.Board[y][x].Current_Color != 'white':\n self.Board[y][x].Set_Piece_Color('white')\n\n def Update_Current_Player(self):\n self.Current_Player.config(text='Turn: ' + self.controller.\n Get_Current_Player())\n\n def Update_Game_Type(self):\n g_type = self.controller.Handler.Get_Game_Type()\n self.Game_Type.configure(text='Rules: ' + g_type)\n\n def Update_Score(self):\n b, w = self.controller.Handler.Get_Score()\n self.Score.configure(text='Black: {0!s} | {1!s} :White'.format(b, w))\n\n def Full_Update(self):\n self.Update_Score()\n self.Update_Current_Player()\n self.Update_Board()\n\n\nclass Postgame(tk.Frame):\n FrameName = 'Postgame'\n\n def __init__(self, parent, controller):\n tk.Frame.__init__(self, parent)\n self.controller = controller\n 
self.configure(bg='white')\n self.Title = tk.Label(self, text='Game Over!', bg='white', font=\n FONTS['large'])\n self.Title.pack(side='top')\n Separator(self, orient='horizontal').pack(side='top', fill='x', padx=10\n )\n self.Winner = tk.Label(self, text='The winner is black-discs.', bg=\n 'white', font=FONTS['medium'])\n self.Winner.pack(side='top')\n self.Buttons = tk.Frame(self, bg='white')\n self.Buttons.pack()\n Replay = tk.Button(self.Buttons, text='Replay', bg='#bbbbbb', font=\n FONTS['medium'], command=lambda : self.Replay())\n Replay.grid(row=0, column=0)\n Quit = tk.Button(self.Buttons, text='Quit', bg='#bbbbbb', font=\n FONTS['medium'], command=lambda : self.Quit())\n Quit.grid(row=0, column=1)\n self.Board_Area = tk.Frame(self, bg='white')\n self.Board_Area.pack(side='bottom')\n self.Score = tk.Label(self.Board_Area, text='', bg='white', font=\n FONTS['medium'])\n self.Score.pack()\n self.Board_Display = tk.Frame(self.Board_Area, bg='green')\n self.Board_Display.pack()\n self.Board = []\n\n def Replay(self):\n self.controller.Replay()\n\n def Quit(self):\n self.controller.destroy()\n exit()\n\n def Update_Board(self):\n for widget in self.Board_Display.winfo_children():\n widget.destroy()\n for y in range(self.controller.Handler.GameParams['y_size']):\n row = []\n for x in range(self.controller.Handler.GameParams['x_size']):\n self.Board_Area.grid_columnconfigure(x, weight=1)\n self.Board_Area.grid_rowconfigure(y, weight=1)\n col = None\n place_col = self.controller.Handler.Game.Board[y][x]\n if place_col == 'B':\n col = 'black'\n elif place_col == 'W':\n col = 'white'\n disc = wg.Disc(self.Board_Display, self.controller, col=col,\n diameter=50)\n disc.grid(row=y, column=x, sticky='nsew')\n row.append(disc)\n self.Board.append(row)\n\n def Update(self):\n winner, scores = self.controller.Handler.Get_Winner()\n if winner.lower() == 'b':\n winner = 'black-discs'\n elif winner.lower() == 'w':\n winner = 'white-discs'\n else:\n winner == 'no one'\n self.Winner.configure(text='The winner is ' + winner)\n self.Score.configure(text='Black: {0!s} | {1!s}:White'.format(\n scores[0], scores[1]))\n self.Update_Board()\n\n\n<mask token>\n",
"step-5": "import tkinter \t\tas tk\nimport Widgets \t\tas wg\nimport Logic \t\tas lgc\nfrom tkinter.ttk \timport Separator\nfrom tkinter.messagebox import showerror, showinfo\n\n# Fonts that we can utilise\nFONTS = {\"large\":(\"Helvetica\", 20), \"medium\":(\"Helvetica\", 16), \"small\":(\"Helvetica\", 12)}\n\nclass Handler: # Handles the window and the Game interaction\n\tdef __init__(self):\n\n\t\t# Game Handle\n\t\tself.Game = None\n\t\tself.GameParams = {}\n\n\t\t# Window Handle\n\t\tself.Window = Window(self)\n\t\tself.Window.mainloop()\n\n\tdef Replay (self): # Reset attributes and classes\n\t\tself.GameParams = {}\n\t\tdel self.Game\n\t\tself.Game = None\n\t\t\n\tdef Is_Running (self):\n\t\treturn self.Game.Running\n\n\tdef Start_Game(self): # Begin the game, run the updates needed.\n\t\tself.Game = lgc.Game(**self.GameParams)\n\t\tself.Game.Start_Game()\n\n\t\t# Update Game page\n\t\tself.Update_Game()\n\t\tself.Window.Pages[\"Game\"].Update_Game_Type()\n\n\tdef Get_Current_Player(self) -> str: # get the current player whose turn it is\n\t\tif self.Game.Running:\n\t\t\tif self.Game.Current_Player == \"B\":\n\t\t\t\treturn \"black\"\n\t\t\telse:\n\t\t\t\treturn \"white\"\n\t\telse:\n\t\t\treturn \"None\"\n\n\tdef Get_Game_Type(self) -> str: # Get the game rule type\n\t\tg = self.Game.Game_Type\n\t\tif g == 1:\n\t\t\treturn \"SIMPLE\"\n\t\telse:\n\t\t\treturn \"FULL\"\n\n\tdef Get_Score(self) -> tuple: # Get the current score\n\t\ts = self.Game.Get_Discs()\n\t\treturn s[0], s[1] # b, w\n\n\tdef Move(self, x: int, y: int) -> bool: # Make a move on a given place\n\t\tcomplete = self.Game.Next_Move(x, y)\n\t\tif complete:\n\t\t\tself.Update_Game()\n\t\t\tself.Game_Complete_Check()\n\t\t\treturn True\n\t\tself.Update_Game()\n\t\tself.Game_Complete_Check()\n\t\treturn False\n\n\tdef Get_Winner(self) -> tuple: # Gets the winner of the game\n\t\treturn self.Game.Check_Winner()\n\n\tdef Game_Complete_Check(self): # Check if the game is over and act accordingly\n\t\tif self.Is_Running() == False:\n\t\t\t# Run Game Over feature here\n\t\t\tself.Window.showPage(\"Postgame\")\n\t\t\t# Update the post page\n\t\t\tself.Window.Pages[\"Postgame\"].Update()\n\n\tdef Update_Game(self): # Run a full update on the game\n\t\tself.Window.Pages[\"Game\"].Full_Update()\n\nclass Window (tk.Tk): # This will be the main window of the GUI\n\tdef __init__ (self, controller, *args, **kwargs):\n\t\ttk.Tk.__init__(self, *args, **kwargs)\n\n\t\tself.Handler = controller # This is handler between the game and window\n\n\t\t# Root attributes\n\t\tself.title(\"Othello\")\n\t\t\n\t\ttry:\n\t\t\tself.iconbitmap(\"Icon.ico\")\n\t\texcept:\n\t\t\tpass\n\n\t\tself.minsize(600, 600)\n\t\t#self.maxsize(1000,1000)\n\n\t\t# Master frame\n\t\tself.container = tk.Frame(self)\n\t\tself.container.pack(side=\"top\", fill=\"both\", expand=True)\n\t\tself.container.grid_rowconfigure(0, weight=1)\n\t\tself.container.grid_columnconfigure(0, weight=1)\n\n\t\t# Set up the pages\n\t\tself.Pages = {}\n\t\tfor page in (Pregame, Custom_Board, Game, Postgame):\n\t\t\t# Initiate each page and add them to the dictionary\n\t\t\t# Dictionary will use the name of the class so that it can be accessed\n\t\t\t# without the knowledge of the clas name\n\t\t\tnew = page(self.container, self)\n\t\t\tself.Pages[page.FrameName] = new\n\t\t\tnew.grid(row=0, column=0, sticky=\"nsew\")\n\n\t\t# Show the initial page\n\t\tself.showPage(\"Pregame\")\n\n\t# Window\n\n\tdef showPage(self, pagename: str): # Show a chosen page\n\t\tpage = 
self.Pages[pagename]\n\t\tpage.tkraise()\n\n\t# Game\n\tdef Begin_Game(self): # Start the game\n\t\tself.Handler.Start_Game()\n\n\tdef Get_Current_Player (self) -> str: # Get the current player\n\t\treturn self.Handler.Get_Current_Player()\n\n\tdef Replay(self): # Clean up the old game, start an new one\n\t\tself.Pages[\"Pregame\"].__GUI_Reset__()\n\t\tself.Pages[\"Game\"].Reset_Game()\n\t\tself.Handler.Replay()\n\t\tself.showPage(\"Pregame\")\n\nclass Pregame (tk.Frame): # The 'home' screen\n\tFrameName = \"Pregame\"\n\tdef __init__ (self, parent, controller):\n\t\ttk.Frame.__init__(self, parent)\t\n\n\t\tself.controller = controller\n\t\tself.configure(bg=\"white\")\n\n\t\tself.set_vals = []\n\n\t\tself.__GUI_Reset__()\n\n\tdef __GUI_Reset__(self): # This will clean the screen and then recreate it, this is essential for replaying the game\n\t\tfor widget in self.winfo_children():\n\t\t\twidget.destroy()\n\n\t\t# Title Banner\n\t\ttk.Label(self, text=\"Otello\", font=FONTS[\"large\"], bg=\"white\").pack(side=\"top\")\n\t\tSeparator(self, orient=\"horizontal\").pack(side=\"top\", fill=\"x\", padx=10)\n\n\t\t# Rule Set\n\t\trule_set_frame = tk.Frame(self, bg=\"white\")\n\t\trule_set_frame.pack(pady=10)\n\t\t# Subheading\n\t\tself.rs_label = tk.Label(rule_set_frame, text=\"Rule Set\", font=FONTS[\"medium\"], bg=\"white\")\n\t\tself.rs_label.pack(side=\"top\")\n\n\t\tself.full_btn = tk.Button(rule_set_frame, text=\"FULL\", font=FONTS[\"medium\"], bg=\"#bbbbbb\",\n\t\t\tcommand=lambda:self.Select_Rule_Set(\"full\"))\n\t\tself.full_btn.pack()\n\n\t\tself.simple_btn = tk.Button(rule_set_frame, text=\"SIMPLE\", font=FONTS[\"medium\"], bg=\"#bbbbbb\",\n\t\t\tcommand=lambda:self.Select_Rule_Set(\"simple\"))\n\t\tself.simple_btn.pack()\n\n\t\t# Row Size\n\t\trow_frame = tk.Frame(self, bg=\"white\")\n\t\trow_frame.pack(pady=10)\n\n\t\tself.row_label = tk.Label(row_frame, text=\"Board Rows\", font=FONTS[\"medium\"], bg=\"white\")\n\t\tself.row_label.grid(row=0, column=0, columnspan=7)\n\n\t\tself.Rows_Buttons = []\n\n\t\tplace = 0\n\t\tfor rows in [4, 6, 8, 10, 12, 14, 16]:\n\t\t\tx = tk.Button(row_frame, text=str(rows), font=FONTS[\"small\"], bg=\"#bbbbbb\",\n\t\t\t\tcommand=lambda rows=rows: self.Select_Rows(rows))\n\t\t\tx.grid(row=1, column=place)\n\t\t\tself.Rows_Buttons.append(x)\n\t\t\tplace += 1\n\n\t\t# Column Size\n\t\tcol_frame = tk.Frame(self, bg=\"white\")\n\t\tcol_frame.pack(pady=10)\n\n\t\tself.col_label = tk.Label(col_frame, text=\"Board Columns\", font=FONTS[\"medium\"], bg=\"white\")\n\t\tself.col_label.grid(row=0, column=0, columnspan=7)\n\n\t\tself.Cols_Buttons = []\n\n\t\tplace = 0\n\t\tfor cols in [4, 6, 8, 10, 12, 14, 16]:\n\t\t\tx = tk.Button(col_frame, text=str(cols), font=FONTS[\"small\"], bg=\"#bbbbbb\",\n\t\t\t\tcommand=lambda cols=cols: self.Select_Cols(cols))\n\t\t\tx.grid(row=1, column=place)\n\t\t\tself.Cols_Buttons.append(x)\n\t\t\tplace += 1\n\n\t\t# First to Move\n\t\tfirst_move_frame = tk.Frame(self, bg=\"white\")\n\t\tfirst_move_frame.pack(pady=10)\n\n\t\tself.first_move_label = tk.Label(first_move_frame, text=\"First to move\", bg=\"white\", font=FONTS[\"medium\"])\n\t\tself.first_move_label.grid(row=0, column=0, columnspan=2)\n\n\t\tself.black_btn = tk.Button(first_move_frame, text=\"Black\", bg=\"#bbbbbb\", font=FONTS[\"medium\"],\n\t\t\tcommand=lambda:self.Select_First_Move(\"black\"))\n\t\tself.black_btn.grid(row=1, column=0)\n\n\t\tself.white_btn = tk.Button(first_move_frame, text=\"White\", bg=\"#bbbbbb\", 
font=FONTS[\"medium\"],\n\t\t\tcommand=lambda:self.Select_First_Move(\"white\"))\n\t\tself.white_btn.grid(row=1, column=1)\n\n\t\t# How to win\n\t\tcondition_frame = tk.Frame(self, bg=\"white\")\n\t\tcondition_frame.pack(pady=10)\n\n\t\tself.condition_label = tk.Label(condition_frame, text=\"The winner is, the player with..\",\n\t\t\tbg=\"white\", font=FONTS[\"medium\"])\n\t\tself.condition_label.grid(row=0, column=0, columnspan=2)\n\n\t\tself.greater_score = tk.Button(condition_frame, text=\"more discs.\", bg=\"#bbbbbb\", font=FONTS[\"medium\"],\n\t\t\tcommand=lambda: self.Select_Condition(\">\"))\n\t\tself.greater_score.grid(row=1, column=0)\n\n\t\tself.lesser_score = tk.Button(condition_frame, text=\"less discs.\", bg=\"#bbbbbb\", font=FONTS[\"medium\"],\n\t\t\tcommand=lambda: self.Select_Condition(\"<\"))\n\t\tself.lesser_score.grid(row=1, column=1)\n\n\n\t\t# Start the game button\n\t\tself.Start_Game_Btn = tk.Button(self, text=\"Start\", bg=\"#ff2222\", activebackground=\"#992222\",\n\t\t\t\t\t\t\t\t\tfont=FONTS[\"medium\"])\n\t\tself.Start_Game_Btn.pack(side=\"bottom\")\n\n\tdef Select_Rule_Set(self, _set: str): # sets the rule set of the game\n\t\tif _set == \"simple\":\n\t\t\tself.controller.Handler.GameParams[\"game_type\"] = 1 # Corresponds to the game logic\n\t\telse:\n\t\t\tself.controller.Handler.GameParams[\"game_type\"] = 2\n\n\t\tself.full_btn.destroy()\n\t\tself.simple_btn.destroy()\n\t\tself.rs_label.configure(text=\"Rule Set: \" + _set.upper())\n\n\t\tself.set_vals.append(\"rules\")\n\t\tself.Check_Can_Start()\n\n\tdef Select_Rows(self, rows: int): # Sets the rows of the board\n\t\tself.controller.Handler.GameParams[\"y_size\"] = rows\n\n\t\tfor button in self.Rows_Buttons:\n\t\t\tbutton.destroy()\n\n\t\tself.row_label.configure(text=\"Board Rows: \" + str(rows))\n\n\t\tself.set_vals.append(\"rows\")\n\t\tself.Check_Can_Start()\n\n\tdef Select_Cols(self, cols: int): # sets the columns of the board\n\t\tself.controller.Handler.GameParams[\"x_size\"] = cols\n\n\t\tfor button in self.Cols_Buttons:\n\t\t\tbutton.destroy()\n\n\t\tself.col_label.configure(text=\"Board Columns: \" + str(cols))\n\t\t\n\t\tself.set_vals.append(\"cols\")\n\t\tself.Check_Can_Start()\n\n\tdef Select_First_Move (self, mover: str): # Sets the first player to make a move\n\t\tif mover == \"black\":\n\t\t\tself.controller.Handler.GameParams[\"first_move\"] = \"B\"\n\t\telse:\n\t\t\tself.controller.Handler.GameParams[\"first_move\"] = \"W\"\n\n\t\tself.black_btn.destroy()\n\t\tself.white_btn.destroy()\n\n\t\tself.first_move_label.configure(text=\"First to move: \" + mover)\n\n\t\tself.set_vals.append(\"move\")\n\t\tself.Check_Can_Start()\n\n\tdef Select_Condition(self, condition: str):# This will set the game win condition\n\t\tself.controller.Handler.GameParams[\"game_winner\"] = condition\n\n\t\tif condition == \">\":\n\t\t\tself.condition_label.configure(text=\"The winner is, the player with more discs.\")\n\t\telse:\n\t\t\tself.condition_label.configure(text=\"The winner is, the player with less discs.\")\n\n\t\tself.lesser_score.destroy()\n\t\tself.greater_score.destroy()\n\n\t\tself.set_vals.append(\"win\")\n\t\tself.Check_Can_Start()\n\n\tdef Check_Can_Start (self): # This will start the game if the game can be started\n\t\tif \"rules\" in self.set_vals and\\\n\t\t \"rows\" in self.set_vals and\\\n\t\t \"cols\" in self.set_vals and\\\n\t\t \"move\" in self.set_vals and\\\n\t\t \"win\" in self.set_vals:\n\t\t self.Start_Game_Btn.configure(bg=\"#22ff22\", activebackground=\"#229922\",\n\t\t 
\tcommand=lambda: self.Start_Custom_Board())\n\n\tdef Start_Custom_Board (self):\n\t\tself.controller.Pages[\"Setup_Board\"].Setup_Board()\n\t\tself.controller.showPage(\"Setup_Board\")\n\t\tself.controller.Pages[\"Setup_Board\"].Instructions_Display()\n\nclass Custom_Board (tk.Frame):\n\tFrameName = \"Setup_Board\"\n\tdef __init__ (self, parent, controller):\n\t\ttk.Frame.__init__ (self, parent)\n\n\t\tself.controller = controller\n\t\tself.configure(bg=\"white\")\n\n\t\t# Title bar\n\t\tself.Title_Frame = tk.Frame(self, bg=\"white\")\n\t\tself.Title_Frame.pack(side=\"top\", fill=\"x\")\n\n\t\t# Title\n\t\ttk.Label(self.Title_Frame, text=\"Create Custom Board\", bg=\"white\", font=FONTS[\"medium\"]).pack(side=\"left\")\n\n\t\t# Start Button\n\t\tstart = tk.Button(self.Title_Frame, text=\"Play\", bg=\"#22ff22\", activebackground=\"#229922\", font=FONTS[\"medium\"],\n\t\t\tcommand=lambda: self.Start())\n\t\tstart.pack(side=\"right\")\t\t\n\n\n\t\t# Use custom Board check button\n\t\tself.Use_Board = tk.IntVar()\n\n\t\tUse_Board = tk.Checkbutton(self.Title_Frame, text=\"Use custom board\", font=FONTS[\"medium\"],\n\t\t\tbg=\"white\", activebackground=\"white\",\n\t\t\tvar=self.Use_Board, onvalue=1, offvalue=0)\n\t\tUse_Board.pack(side=\"right\", padx=10)\n\n\t\t\n\t\t# Board\n\t\tself.Board_Area = tk.Frame(self, bg=\"#009900\")\n\t\tself.Board_Area.pack(side=\"top\", fill=\"both\", expand=True)\n\n\t\tself.Board = []\n\n\tdef Setup_Board (self):\n\t\tfor widget in self.Board_Area.winfo_children():\n\t\t\twidget.destroy()\n\t\tself.Board = []\n\n\t\t\n\t\tfor y in range(self.controller.Handler.GameParams[\"y_size\"]):\n\t\t\trow = []\n\t\t\tfor x in range(self.controller.Handler.GameParams[\"x_size\"]):\n\t\t\t\t# Diameter with respond to the length of the shortest side of the board\n\t\t\t\theight = self.Board_Area.winfo_height()\n\t\t\t\twidth = self.Board_Area.winfo_width()\n\n\t\t\t\tif height > width:\n\t\t\t\t\tdiameter = width/self.controller.Handler.GameParams[\"x_size\"]\n\t\t\t\telse:\n\t\t\t\t\tdiameter = height/self.controller.Handler.GameParams[\"y_size\"]\n\n\t\t\t\tself.Board_Area.grid_columnconfigure(x, weight=1)\n\t\t\t\tself.Board_Area.grid_rowconfigure(y, weight=1)\n\n\t\t\t\tdisc = wg.Disc(self.Board_Area, self.controller, diameter=diameter, mode=\"setup\")\n\t\t\t\tdisc.grid(row=y, column=x, sticky=\"nsew\")\n\t\t\t\trow.append(disc)\n\n\t\t\tself.Board.append(row)\n\n\tdef Parse_Board (self) -> list: # This will parse the GUI board and create a board that will work for the Game()\n\t\tnew_board = []\n\t\tfor row in self.Board:\n\t\t\tnew_row = []\n\t\t\tfor disc in row:\n\t\t\t\tif disc.Current_Color == \"white\":\n\t\t\t\t\tnew_row.append(\"W\")\n\t\t\t\telif disc.Current_Color == \"black\":\n\t\t\t\t\tnew_row.append(\"B\")\n\t\t\t\telse:\n\t\t\t\t\tnew_row.append(None)\n\t\t\tnew_board.append(new_row)\n\n\t\treturn new_board\n\n\tdef Instructions_Display(self):\n\t\tshowinfo(\"How to use\", \"Click on a tile to cycle between white, black or empty. 
Check the \\\"Use Custom Board\\\" box to use this board!\")\n\n\tdef Start (self): # This will check if the user wants to use a custom board and then will set Game board to be the users selection\n\t\tif self.Use_Board.get():\n\t\t\tself.controller.Handler.GameParams[\"board\"] = self.Parse_Board()\n\t\tself.controller.Begin_Game()\n\t\tself.controller.Pages[\"Game\"].__GUI_init__()\n\t\tself.controller.Pages[\"Game\"].Update_Board()\n\t\tself.controller.showPage(\"Game\")\n\nclass Game (tk.Frame): # This is the 'stage' where the game will be played.\n\tFrameName = \"Game\"\n\tdef __init__ (self, parent, controller):\n\t\ttk.Frame.__init__(self, parent)\n\n\t\tself.controller = controller\n\t\tself.configure(bg=\"white\")\n\n\t\t# Status Bar\n\t\tself.Status_Bar = tk.Frame(self, bg=\"white\")\n\t\tself.Status_Bar.pack(side=\"top\", fill=\"x\")\n\n\t\tself.Status_Bar.grid_columnconfigure(0, weight=1)\n\t\tself.Status_Bar.grid_columnconfigure(1, weight=1)\n\t\tself.Status_Bar.grid_columnconfigure(2, weight=1)\n\t\tself.Status_Bar.grid_rowconfigure(0, weight=1)\n\n\t\tself.Current_Player = tk.Label(self.Status_Bar, text=\"None\", bg=\"white\", font=FONTS[\"medium\"])\n\t\tself.Current_Player.grid(row=0, column=0)\n\n\t\tself.Game_Type = tk.Label(self.Status_Bar, text=\"FULL\", bg=\"white\", font=FONTS[\"medium\"])\n\t\tself.Game_Type.grid(row=0, column=1)\n\n\t\tself.Score = tk.Label(self.Status_Bar, text=\"Black: 2 | 2:White\", bg=\"white\", font=FONTS[\"medium\"])\n\t\tself.Score.grid(row=0, column=2)\n\n\t\t# Board\n\t\tself.Board_Area = tk.Frame(self, bg=\"#009900\")\n\t\tself.Board_Area.pack(side=\"top\", fill=\"both\", expand=True)\n\n\t\tself.Board = []\n\n\tdef __GUI_init__ (self): # This will initiate the game board once all the datya is provided.\n\t\tfor y in range(self.controller.Handler.GameParams[\"y_size\"]):\n\t\t\trow = []\n\t\t\tfor x in range(self.controller.Handler.GameParams[\"x_size\"]):\n\t\t\t\t# Diameter with respond to the length of the shortest side of the board\n\t\t\t\theight = self.Board_Area.winfo_height()\n\t\t\t\twidth = self.Board_Area.winfo_width()\n\n\t\t\t\tif height > width:\n\t\t\t\t\tdiameter = width/self.controller.Handler.GameParams[\"x_size\"]\n\t\t\t\telse:\n\t\t\t\t\tdiameter = height/self.controller.Handler.GameParams[\"y_size\"]\n\n\t\t\t\tself.Board_Area.grid_columnconfigure(x, weight=1)\n\t\t\t\tself.Board_Area.grid_rowconfigure(y, weight=1)\n\n\t\t\t\tdisc = wg.Disc(self.Board_Area, self.controller, diameter=diameter,\n\t\t\t\t\tcommand= lambda x=x, y=y: self.Disc_Function(x, y))\n\t\t\t\tdisc.grid(row=y, column=x, sticky=\"nsew\")\n\t\t\t\trow.append(disc)\n\n\t\t\tself.Board.append(row)\n\n\t\tself.Update_Board()\n\n\tdef Reset_Game(self): #This will reset the game board to its initial state\n\t\tself.Board = []\n\t\tfor widget in self.Board_Area.winfo_children():\n\t\t\twidget.destroy()\n\n\tdef Disc_Function (self, x: int, y: int): # This is the function run when the player clicks a disc slot/disc\n\t\tif not self.controller.Handler.Move(x+1, y+1): # Try run the Move function on the Handler\n\t\t\tself.Invalid_Move()\n\n\tdef Invalid_Move(self): # This command will run when a player tries to make a move thats not possible\n\t\tshowerror(\"Invalid Move\", \"You cannot move there!\")\n\n\tdef Update_Board (self): # Update the board to mathe the Game() board\n\t\tfor y in range(len(self.Board)):\n\t\t\tfor x in range(len(self.Board[y])):\n\t\t\t\tgame_piece = self.controller.Handler.Game.Board[y][x]\n\t\t\t\tif game_piece == 
None:\n\t\t\t\t\tpass\n\t\t\t\telif game_piece == \"B\":\n\t\t\t\t\tif self.Board[y][x].Current_Color != \"black\":\n\t\t\t\t\t\tself.Board[y][x].Set_Piece_Color(\"black\")\n\t\t\t\telif game_piece == \"W\":\n\t\t\t\t\tif self.Board[y][x].Current_Color != \"white\":\n\t\t\t\t\t\tself.Board[y][x].Set_Piece_Color(\"white\")\n\n\tdef Update_Current_Player (self): # Update the current player identifier\n\t\tself.Current_Player.config(text=\"Turn: \" + self.controller.Get_Current_Player())\n\n\tdef Update_Game_Type(self): # Update the game type identifier\n\t\tg_type = self.controller.Handler.Get_Game_Type()\n\t\tself.Game_Type.configure(text=\"Rules: \" + g_type)\n\n\tdef Update_Score (self): # Update the score identifier\n\t\tb, w = self.controller.Handler.Get_Score()\n\t\tself.Score.configure(text=\"Black: {0!s} | {1!s} :White\".format(b, w))\n\n\tdef Full_Update(self): # Run a full update on the graphics\n\t\tself.Update_Score()\n\t\tself.Update_Current_Player()\n\t\tself.Update_Board()\n\nclass Postgame (tk.Frame): # The 'end game' screen\n\tFrameName = \"Postgame\"\n\tdef __init__ (self, parent, controller):\n\t\ttk.Frame.__init__(self, parent)\n\n\t\tself.controller = controller\n\t\tself.configure(bg=\"white\")\n\n\t\t# Set a page title\n\t\tself.Title = tk.Label(self, text=\"Game Over!\", bg=\"white\", font=FONTS[\"large\"])\n\t\tself.Title.pack(side=\"top\")\n\n\t\tSeparator(self, orient=\"horizontal\").pack(side=\"top\", fill=\"x\", padx=10)\n\n\t\t# Set the winner text object\n\t\tself.Winner = tk.Label(self, text=\"The winner is black-discs.\", bg=\"white\", font=FONTS[\"medium\"])\n\t\tself.Winner.pack(side=\"top\")\n\n\t\t# Create the replay and exit buttons\n\t\tself.Buttons = tk.Frame(self, bg=\"white\")\n\t\tself.Buttons.pack()\n\n\t\tReplay = tk.Button(self.Buttons, text=\"Replay\", bg=\"#bbbbbb\", font=FONTS[\"medium\"],\n\t\t\tcommand=lambda: self.Replay())\n\t\tReplay.grid(row=0, column=0)\n\n\t\tQuit = tk.Button(self.Buttons, text=\"Quit\", bg=\"#bbbbbb\", font=FONTS[\"medium\"],\n\t\t\tcommand=lambda: self.Quit())\n\t\tQuit.grid(row=0, column=1)\n\n\t\t# the area for the board output\n\t\tself.Board_Area = tk.Frame(self, bg=\"white\")\n\t\tself.Board_Area.pack(side=\"bottom\")\n\n\t\t# Score text\n\t\tself.Score = tk.Label(self.Board_Area, text=\"\", bg=\"white\", font=FONTS[\"medium\"])\n\t\tself.Score.pack()\n\n\t\t# The display for the board\n\t\tself.Board_Display = tk.Frame(self.Board_Area, bg=\"green\")\n\t\tself.Board_Display.pack()\n\n\t\tself.Board = []\n\n\tdef Replay(self): # Initiate the Replay\n\t\tself.controller.Replay()\n\n\tdef Quit(self): # Kill the game\n\t\tself.controller.destroy()\n\t\texit()\n\n\tdef Update_Board (self): # Update the game board display, kill old, create new\n\t\tfor widget in self.Board_Display.winfo_children():\n\t\t\twidget.destroy()\n\n\t\tfor y in range(self.controller.Handler.GameParams[\"y_size\"]):\n\t\t\trow = []\n\t\t\tfor x in range(self.controller.Handler.GameParams[\"x_size\"]):\n\t\t\t\tself.Board_Area.grid_columnconfigure(x, weight=1)\n\t\t\t\tself.Board_Area.grid_rowconfigure(y, weight=1)\n\n\t\t\t\tcol = None\n\t\t\t\tplace_col = self.controller.Handler.Game.Board[y][x]\n\t\t\t\tif place_col == \"B\":\n\t\t\t\t\tcol = \"black\"\n\t\t\t\telif place_col == \"W\":\n\t\t\t\t\tcol = \"white\"\n\n\t\t\t\tdisc = wg.Disc(self.Board_Display, self.controller, col=col, diameter=50)\n\t\t\t\tdisc.grid(row=y, column=x, sticky=\"nsew\")\n\t\t\t\trow.append(disc)\n\n\t\t\tself.Board.append(row)\n\n\tdef Update(self): # Update the 
whole page\n\t\twinner, scores = self.controller.Handler.Get_Winner() \n\t\tif winner.lower() == \"b\":\n\t\t\twinner = \"black-discs\"\n\t\telif winner.lower() == \"w\":\n\t\t\twinner = \"white-discs\"\n\t\telse:\n\t\t\twinner == \"no one\"\n\t\tself.Winner.configure(text=\"The winner is \" + winner)\n\t\tself.Score.configure(text=\"Black: {0!s} | {1!s}:White\".format(scores[0], scores[1]))\n\t\tself.Update_Board()\n\nif __name__ == \"__main__\":\n\tWindow = Handler()\n",
"step-ids": [
39,
40,
41,
52,
59
]
}
|
[
39,
40,
41,
52,
59
] |
import numpy as np
class Element(object):
def __init__(self):
self.ndof = 0
self.nn = 0
self.ng = 0
self.element_type = 0
self.coord_position = np.array([])
self.setup()
def setup(self):
pass
def shape_function_value(self):
pass
def shape_function_partial(self):
pass
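
# --- Added example: a minimal sketch of how this skeleton is meant to be
# specialized -- the Quad4 name, its counts and the bilinear shape functions
# are illustrative assumptions, not part of the original record.
class Quad4(Element):

    def setup(self):
        self.ndof = 2   # assumed: 2 dofs per node (plane problem)
        self.nn = 4     # assumed: 4-node quadrilateral
        self.ng = 4     # assumed: 2x2 Gauss points
        self.coord_position = np.array([[-1.0, -1.0], [1.0, -1.0],
                                        [1.0, 1.0], [-1.0, 1.0]])

    def shape_function_value(self, xi, eta):
        # bilinear shape functions on the reference square [-1, 1]^2
        # (the base-class hook takes no arguments; xi/eta are added here)
        return 0.25 * np.array([(1 - xi) * (1 - eta),
                                (1 + xi) * (1 - eta),
                                (1 + xi) * (1 + eta),
                                (1 - xi) * (1 + eta)])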
|
normal
|
{
"blob_id": "ed2ae166c4881289b27b7e74e212ba2d6164998b",
"index": 2981,
"step-1": "<mask token>\n\n\nclass Element(object):\n\n def __init__(self):\n self.ndof = 0\n self.nn = 0\n self.ng = 0\n self.element_type = 0\n self.coord_position = np.array([])\n self.setup()\n <mask token>\n <mask token>\n\n def shape_function_partial(self):\n pass\n",
"step-2": "<mask token>\n\n\nclass Element(object):\n\n def __init__(self):\n self.ndof = 0\n self.nn = 0\n self.ng = 0\n self.element_type = 0\n self.coord_position = np.array([])\n self.setup()\n\n def setup(self):\n pass\n <mask token>\n\n def shape_function_partial(self):\n pass\n",
"step-3": "<mask token>\n\n\nclass Element(object):\n\n def __init__(self):\n self.ndof = 0\n self.nn = 0\n self.ng = 0\n self.element_type = 0\n self.coord_position = np.array([])\n self.setup()\n\n def setup(self):\n pass\n\n def shape_function_value(self):\n pass\n\n def shape_function_partial(self):\n pass\n",
"step-4": "import numpy as np\n\n\nclass Element(object):\n\n def __init__(self):\n self.ndof = 0\n self.nn = 0\n self.ng = 0\n self.element_type = 0\n self.coord_position = np.array([])\n self.setup()\n\n def setup(self):\n pass\n\n def shape_function_value(self):\n pass\n\n def shape_function_partial(self):\n pass\n",
"step-5": null,
"step-ids": [
3,
4,
5,
6
]
}
|
[
3,
4,
5,
6
] |
<|reserved_special_token_0|>
def read_input_RS():
low = np.loadtxt('LowerArray.csv', delimiter=',', skiprows=1)
lower_bound = np.ravel(low)
upper_bound = np.ravel(np.transpose(np.loadtxt('UpperArray.csv',
delimiter=',', skiprows=1)))
return lower_bound, upper_bound, low[0, :].size
<|reserved_special_token_0|>
def independent_probability():
probability_assignment = np.loadtxt('ProbabilityAssignment.csv',
delimiter=',', skiprows=1)
return probability_assignment
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def read_input_RS():
low = np.loadtxt('LowerArray.csv', delimiter=',', skiprows=1)
lower_bound = np.ravel(low)
upper_bound = np.ravel(np.transpose(np.loadtxt('UpperArray.csv',
delimiter=',', skiprows=1)))
return lower_bound, upper_bound, low[0, :].size
def generate_combinations(lower, upper, n):
lower_input = itt.combinations(lower, n)
upper_input = np.array(list(itt.product(upper, repeat=n)))
return lower_input, upper_input
def independent_probability():
probability_assignment = np.loadtxt('ProbabilityAssignment.csv',
delimiter=',', skiprows=1)
return probability_assignment
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def read_input_RS():
low = np.loadtxt('LowerArray.csv', delimiter=',', skiprows=1)
lower_bound = np.ravel(low)
upper_bound = np.ravel(np.transpose(np.loadtxt('UpperArray.csv',
delimiter=',', skiprows=1)))
return lower_bound, upper_bound, low[0, :].size
def generate_combinations(lower, upper, n):
lower_input = itt.combinations(lower, n)
upper_input = np.array(list(itt.product(upper, repeat=n)))
return lower_input, upper_input
def independent_probability():
probability_assignment = np.loadtxt('ProbabilityAssignment.csv',
delimiter=',', skiprows=1)
return probability_assignment
if __name__ == '__main__':
a, b, r = read_input_RS()
d, e = generate_combinations(a, b, r)
print(b)
print(e)
np.savetxt('test.out', e, delimiter=',')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import itertools as itt
import numpy as np
import matplotlib.pyplot as plt
def read_input_RS():
low = np.loadtxt('LowerArray.csv', delimiter=',', skiprows=1)
lower_bound = np.ravel(low)
upper_bound = np.ravel(np.transpose(np.loadtxt('UpperArray.csv',
delimiter=',', skiprows=1)))
return lower_bound, upper_bound, low[0, :].size
def generate_combinations(lower, upper, n):
lower_input = itt.combinations(lower, n)
upper_input = np.array(list(itt.product(upper, repeat=n)))
return lower_input, upper_input
def independent_probability():
probability_assignment = np.loadtxt('ProbabilityAssignment.csv',
delimiter=',', skiprows=1)
return probability_assignment
if __name__ == '__main__':
a, b, r = read_input_RS()
d, e = generate_combinations(a, b, r)
print(b)
print(e)
np.savetxt('test.out', e, delimiter=',')
<|reserved_special_token_1|>
# Code Rodrigo
'''
This script basically generates all the possible combinations
to be analyzed according to the Dempster Shafer Theory.
It requires to define beforehand, the combination of variables
that lead to the higher and lower bound for a given combination
of random sets, via the sensitivity analysis
'''
import itertools as itt
import numpy as np
import matplotlib.pyplot as plt
def read_input_RS ():
low=np.loadtxt('LowerArray.csv', delimiter=',', skiprows=1)
lower_bound = np.ravel(low)
upper_bound = (np.ravel(np.transpose(np.loadtxt('UpperArray.csv',
delimiter=',', skiprows=1))))
return lower_bound, upper_bound, low[0,:].size
def generate_combinations (lower, upper, n):
lower_input = itt.combinations(lower, n)
upper_input = np.array(list(itt.product(upper, repeat=n)))
return lower_input, upper_input,
def independent_probability ():
probability_assignment = (np.loadtxt('ProbabilityAssignment.csv',
delimiter=',', skiprows=1))
return probability_assignment
if __name__ == "__main__":
a,b,r=read_input_RS ()
#c=a[0,:].size
d,e=generate_combinations (a,b,r)
print(b)
print(e)
np.savetxt('test.out', e, delimiter=',')
#b=read_input_RS ()
#c=generate_combinations (a,b)
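
# --- Added example: a toy expansion, assuming two variables with two focal
# elements each, of what itt.product enumerates for the upper bounds above:
# >>> list(itt.product([1.0, 2.0], repeat=2))
# [(1.0, 1.0), (1.0, 2.0), (2.0, 1.0), (2.0, 2.0)]
# Note that the lower bounds go through itt.combinations(), which yields only
# [(1.0, 2.0)] for the same input; if both bounds are meant to span the same
# Cartesian grid of focal elements, that asymmetry is probably unintended.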
|
flexible
|
{
"blob_id": "4b44f4343da1677b5436ec2b153e573fda3c0cee",
"index": 2280,
"step-1": "<mask token>\n\n\ndef read_input_RS():\n low = np.loadtxt('LowerArray.csv', delimiter=',', skiprows=1)\n lower_bound = np.ravel(low)\n upper_bound = np.ravel(np.transpose(np.loadtxt('UpperArray.csv',\n delimiter=',', skiprows=1)))\n return lower_bound, upper_bound, low[0, :].size\n\n\n<mask token>\n\n\ndef independent_probability():\n probability_assignment = np.loadtxt('ProbabilityAssignment.csv',\n delimiter=',', skiprows=1)\n return probability_assignment\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef read_input_RS():\n low = np.loadtxt('LowerArray.csv', delimiter=',', skiprows=1)\n lower_bound = np.ravel(low)\n upper_bound = np.ravel(np.transpose(np.loadtxt('UpperArray.csv',\n delimiter=',', skiprows=1)))\n return lower_bound, upper_bound, low[0, :].size\n\n\ndef generate_combinations(lower, upper, n):\n lower_input = itt.combinations(lower, n)\n upper_input = np.array(list(itt.product(upper, repeat=n)))\n return lower_input, upper_input\n\n\ndef independent_probability():\n probability_assignment = np.loadtxt('ProbabilityAssignment.csv',\n delimiter=',', skiprows=1)\n return probability_assignment\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef read_input_RS():\n low = np.loadtxt('LowerArray.csv', delimiter=',', skiprows=1)\n lower_bound = np.ravel(low)\n upper_bound = np.ravel(np.transpose(np.loadtxt('UpperArray.csv',\n delimiter=',', skiprows=1)))\n return lower_bound, upper_bound, low[0, :].size\n\n\ndef generate_combinations(lower, upper, n):\n lower_input = itt.combinations(lower, n)\n upper_input = np.array(list(itt.product(upper, repeat=n)))\n return lower_input, upper_input\n\n\ndef independent_probability():\n probability_assignment = np.loadtxt('ProbabilityAssignment.csv',\n delimiter=',', skiprows=1)\n return probability_assignment\n\n\nif __name__ == '__main__':\n a, b, r = read_input_RS()\n d, e = generate_combinations(a, b, r)\n print(b)\n print(e)\n np.savetxt('test.out', e, delimiter=',')\n",
"step-4": "<mask token>\nimport itertools as itt\nimport numpy as np\nimport matplotlib.pyplot as plt\n\n\ndef read_input_RS():\n low = np.loadtxt('LowerArray.csv', delimiter=',', skiprows=1)\n lower_bound = np.ravel(low)\n upper_bound = np.ravel(np.transpose(np.loadtxt('UpperArray.csv',\n delimiter=',', skiprows=1)))\n return lower_bound, upper_bound, low[0, :].size\n\n\ndef generate_combinations(lower, upper, n):\n lower_input = itt.combinations(lower, n)\n upper_input = np.array(list(itt.product(upper, repeat=n)))\n return lower_input, upper_input\n\n\ndef independent_probability():\n probability_assignment = np.loadtxt('ProbabilityAssignment.csv',\n delimiter=',', skiprows=1)\n return probability_assignment\n\n\nif __name__ == '__main__':\n a, b, r = read_input_RS()\n d, e = generate_combinations(a, b, r)\n print(b)\n print(e)\n np.savetxt('test.out', e, delimiter=',')\n",
"step-5": "# Code Rodrigo\n\n'''\nThis script, basically generates all he possible combinations\nto be analyzed according to the Dempster Shafer Theory.\nIt requires to define beforehand, the combination of variables\nthat lead to the higher and lower bound for a given combination\nof random sets, via the sensitivity analysis\n'''\n\nimport itertools as itt\nimport numpy as np\nimport matplotlib.pyplot as plt\n\ndef read_input_RS ():\n low=np.loadtxt('LowerArray.csv', delimiter=',', skiprows=1)\n lower_bound = np.ravel(low)\n upper_bound = (np.ravel(np.transpose(np.loadtxt('UpperArray.csv',\n delimiter=',', skiprows=1))))\n return lower_bound, upper_bound, low[0,:].size\n\ndef generate_combinations (lower, upper, n):\n lower_input = itt.combinations(lower, n)\n upper_input = np.array(list(itt.product(upper, repeat=n)))\n return lower_input, upper_input,\n\ndef independent_probability ():\n probability_assignment = (np.loadtxt('ProbabilityAssignment.csv',\n delimiter=',', skiprows=1))\n return probability_assignment\n\nif __name__ == \"__main__\":\n a,b,r=read_input_RS ()\n #c=a[0,:].size\n d,e=generate_combinations (a,b,r)\n print(b)\n print(e)\n np.savetxt('test.out', e, delimiter=',')\n\n#b=read_input_RS ()\n#c=generate_combinations (a,b)\n\n\n\n\n\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
import math
import turtle
wn = turtle.Screen()
wn.bgcolor('lightblue')
PI = math.pi   # was hard-coded as 3.14; the rounded value drifts over many cycles
R_outer = 50   # unused below -- possibly left over from an epicycloid experiment
R_inner = 200  # unused below
fred = turtle.Turtle()
fred.speed(0)  # 0 = fastest; turtle coerces out-of-range values (e.g. 99999) to 0
def cycloid(r, k, nos_cycle, direction):
    # k and direction are only referenced by the commented-out variant below;
    # the original body mixed tabs and spaces, which raises TabError in Python 3
    n = 36                  # points drawn per cycle
    angle = 2 * PI / n
    for i in range(nos_cycle * n):
        beta = i * angle
        x = r * (beta - math.sin(beta))
        y = r * (1 - math.cos(beta))
        ### equally valid:
        ### x = r * math.cos(beta) + r * beta
        ### y = direction * r * math.sin(beta)
        fred.goto(x, y)
cycloid(10, 0.1, 100, -1)
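# --- Added example: a guess at what the unused k argument was for -- a
# curtate cycloid, x = r*(beta - k*sin(beta)), y = r*(1 - k*cos(beta)),
# drawn with an assumed vertical offset so the two curves do not overlap.
fred.penup()
fred.goto(0, -60)
fred.pendown()
for i in range(20 * 36):
    beta = i * 2 * PI / 36
    fred.goto(10 * (beta - 0.5 * math.sin(beta)),
              -60 + 10 * (1 - 0.5 * math.cos(beta)))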
wn.exitonclick()
|
normal
|
{
"blob_id": "a62dd287f9fc6f79ef95a3de83f52c794efe00a7",
"index": 7407,
"step-1": "\nimport math\nimport turtle\n\nwn = turtle.Screen()\nwn.bgcolor('lightblue')\nPI=3.14\nR_outer=50\nR_inner=200\n\nfred = turtle.Turtle()\nfred.speed(99999)\n\ndef cycloid(r, k, nos_cycle, direction):\n n=36\n angle=2*PI/n\n x=1\n y=0\n for i in range(nos_cycle*n):\n\t beta = i * angle \n\t x = r*(beta-math.sin(beta))\n\t y = r*(1-math.cos(beta))\n\t ### equally valid\n ###x = ((r)*math.cos(beta) + r*beta)\n ###y = direction*(r)*math.sin(beta)\n fred.goto(x,y)\n\ncycloid(10, 0.1, 100, -1)\n\nwn.exitonclick()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
def send_value(value):
port = create_port()
status = get_mov_parameters()[0]
if port_status(port):
if status == '1' or status == 'True':
string = ''.join([str(value), ' \n'])
port.write(string.encode())
print('True')
else:
print('False')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def create_port():
port = get_mov_parameters()[1]
try:
ser = serial.Serial(port=port, baudrate=9600, timeout=1)
return ser
except:
print('Open port failed')
change_mov_parameters('0', port, '0', '0')
return False
def port_status(ser):
if ser.isOpen():
if get_mov_parameters()[0] == '1' or get_mov_parameters()[0] == 'True':
return True
else:
try:
create_port()
return True
except:
print('error opening')
change_mov_parameters('0', get_mov_parameters()[1], '0', '0')
return False
<|reserved_special_token_0|>
def send_value(value):
port = create_port()
status = get_mov_parameters()[0]
if port_status(port):
if status == '1' or status == 'True':
string = ''.join([str(value), ' \n'])
port.write(string.encode())
print('True')
else:
print('False')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def create_port():
port = get_mov_parameters()[1]
try:
ser = serial.Serial(port=port, baudrate=9600, timeout=1)
return ser
except:
print('Open port failed')
change_mov_parameters('0', port, '0', '0')
return False
def port_status(ser):
if ser.isOpen():
if get_mov_parameters()[0] == '1' or get_mov_parameters()[0] == 'True':
return True
else:
try:
create_port()
return True
except:
print('error opening')
change_mov_parameters('0', get_mov_parameters()[1], '0', '0')
return False
def close_port(ser):
ser.close()
def send_value(value):
port = create_port()
status = get_mov_parameters()[0]
if port_status(port):
if status == '1' or status == 'True':
string = ''.join([str(value), ' \n'])
port.write(string.encode())
print('True')
else:
print('False')
<|reserved_special_token_1|>
import serial
import time
from Files_management import get_mov_parameters, change_mov_parameters
def create_port():
port = get_mov_parameters()[1]
try:
ser = serial.Serial(port=port, baudrate=9600, timeout=1)
return ser
except:
print('Open port failed')
change_mov_parameters('0', port, '0', '0')
return False
def port_status(ser):
if ser.isOpen():
if get_mov_parameters()[0] == '1' or get_mov_parameters()[0] == 'True':
return True
else:
try:
create_port()
return True
except:
print('error opening')
change_mov_parameters('0', get_mov_parameters()[1], '0', '0')
return False
def close_port(ser):
ser.close()
def send_value(value):
port = create_port()
status = get_mov_parameters()[0]
if port_status(port):
if status == '1' or status == 'True':
string = ''.join([str(value), ' \n'])
port.write(string.encode())
print('True')
else:
print('False')
<|reserved_special_token_1|>
import serial
import time
from Files_management import get_mov_parameters,change_mov_parameters
#-------------------------------------------------------------------------------
def create_port():
port = get_mov_parameters()[1]
try:
ser = serial.Serial(port=port,baudrate=9600,timeout=1)
return ser
except:
print('Open port failed')
change_mov_parameters('0',port,'0','0')
return False
#-------------------------------------------------------------------------------
def port_status(ser):
if(ser.isOpen()):
if(get_mov_parameters()[0] == "1" or get_mov_parameters()[0] == "True"):
return True
else:
try:
create_port()
return True
except:
print("error opening")
change_mov_parameters('0',get_mov_parameters()[1],'0','0')
return False
#-------------------------------------------------------------------------------
def close_port(ser):
ser.close()
#-------------------------------------------------------------------------------
def send_value(value):
    port = create_port()
    if not port:          # create_port() returns False on failure, so bail out
        print('False')    # before port_status() calls .isOpen() on a bool
        return
    status = get_mov_parameters()[0]
    if port_status(port):
        if status == '1' or status == 'True':
            string = "".join([str(value), ' \n'])
            port.write(string.encode())
            print('True')
        else:
            print('False')
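
#-------------------------------------------------------------------------------
# --- Added example: a minimal usage sketch; the angle sweep assumes the
# device listening on the port is a servo, which the record does not state.
if __name__ == '__main__':
    for angle in (0, 90, 180):
        send_value(angle)
        time.sleep(0.5)   # pause between writes (time is imported above)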
|
flexible
|
{
"blob_id": "72cda573bf9c744213a2957d51171f437f211353",
"index": 3467,
"step-1": "<mask token>\n\n\ndef send_value(value):\n port = create_port()\n status = get_mov_parameters()[0]\n if port_status(port):\n if status == '1' or status == 'True':\n string = ''.join([str(value), ' \\n'])\n port.write(string.encode())\n print('True')\n else:\n print('False')\n",
"step-2": "<mask token>\n\n\ndef create_port():\n port = get_mov_parameters()[1]\n try:\n ser = serial.Serial(port=port, baudrate=9600, timeout=1)\n return ser\n except:\n print('Open port failded')\n change_mov_parameters('0', port, '0', '0')\n return False\n\n\ndef port_status(ser):\n if ser.isOpen():\n if get_mov_parameters()[0] == '1' or get_mov_parameters()[0] == 'True':\n return True\n else:\n try:\n create_port()\n return True\n except:\n print('error opening')\n change_mov_parameters('0', get_mov_parameters()[1], '0', '0')\n return False\n\n\n<mask token>\n\n\ndef send_value(value):\n port = create_port()\n status = get_mov_parameters()[0]\n if port_status(port):\n if status == '1' or status == 'True':\n string = ''.join([str(value), ' \\n'])\n port.write(string.encode())\n print('True')\n else:\n print('False')\n",
"step-3": "<mask token>\n\n\ndef create_port():\n port = get_mov_parameters()[1]\n try:\n ser = serial.Serial(port=port, baudrate=9600, timeout=1)\n return ser\n except:\n print('Open port failded')\n change_mov_parameters('0', port, '0', '0')\n return False\n\n\ndef port_status(ser):\n if ser.isOpen():\n if get_mov_parameters()[0] == '1' or get_mov_parameters()[0] == 'True':\n return True\n else:\n try:\n create_port()\n return True\n except:\n print('error opening')\n change_mov_parameters('0', get_mov_parameters()[1], '0', '0')\n return False\n\n\ndef close_port(ser):\n ser.close()\n\n\ndef send_value(value):\n port = create_port()\n status = get_mov_parameters()[0]\n if port_status(port):\n if status == '1' or status == 'True':\n string = ''.join([str(value), ' \\n'])\n port.write(string.encode())\n print('True')\n else:\n print('False')\n",
"step-4": "import serial\nimport time\nfrom Files_management import get_mov_parameters, change_mov_parameters\n\n\ndef create_port():\n port = get_mov_parameters()[1]\n try:\n ser = serial.Serial(port=port, baudrate=9600, timeout=1)\n return ser\n except:\n print('Open port failded')\n change_mov_parameters('0', port, '0', '0')\n return False\n\n\ndef port_status(ser):\n if ser.isOpen():\n if get_mov_parameters()[0] == '1' or get_mov_parameters()[0] == 'True':\n return True\n else:\n try:\n create_port()\n return True\n except:\n print('error opening')\n change_mov_parameters('0', get_mov_parameters()[1], '0', '0')\n return False\n\n\ndef close_port(ser):\n ser.close()\n\n\ndef send_value(value):\n port = create_port()\n status = get_mov_parameters()[0]\n if port_status(port):\n if status == '1' or status == 'True':\n string = ''.join([str(value), ' \\n'])\n port.write(string.encode())\n print('True')\n else:\n print('False')\n",
"step-5": "import serial\nimport time\nfrom Files_management import get_mov_parameters,change_mov_parameters\n\n#-------------------------------------------------------------------------------\ndef create_port():\n port = get_mov_parameters()[1]\n try:\n ser = serial.Serial(port=port,baudrate=9600,timeout=1)\n return ser\n except:\n print('Open port failded')\n change_mov_parameters('0',port,'0','0')\n return False\n\n#-------------------------------------------------------------------------------\ndef port_status(ser):\n if(ser.isOpen()):\n if(get_mov_parameters()[0] == \"1\" or get_mov_parameters()[0] == \"True\"):\n return True\n else: \n try:\n create_port()\n return True\n except:\n print(\"error opening\")\n change_mov_parameters('0',get_mov_parameters()[1],'0','0')\n return False\n\n#-------------------------------------------------------------------------------\ndef close_port(ser):\n ser.close()\n\n#-------------------------------------------------------------------------------\ndef send_value(value):\n port = create_port()\n status = get_mov_parameters()[0]\n if(port_status(port)):\n if(status == '1' or status == 'True'):\n string = \"\".join([str(value),' \\n'])\n port.write(string.encode())\n print('True')\n else :\n print('False')\n \n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
import gdalnumeric
#Input File
src = "../dati/islands/islands.tif"
#Output
tgt = "../dati/islands/islands_classified.jpg"
srcArr = gdalnumeric.LoadFile(src)
classes = gdalnumeric.numpy.histogram(srcArr,bins=2)[1]
print(classes)
#Color look-up table (LUT) - one entry per histogram bin edge, i.e. len(classes) = bins+1 entries.
#Specified as R,G,B tuples
lut = [[255,0,0],[0,0,0],[255,255,255]]
start = 1
rgb = gdalnumeric.numpy.zeros((3, srcArr.shape[0], srcArr.shape[1]), gdalnumeric.numpy.float32)
# Process all classes and assign colors
for i in range(len(classes)):
mask = gdalnumeric.numpy.logical_and(start <= srcArr, srcArr <= classes[i])
for j in range(len(lut[i])):
rgb[j] = gdalnumeric.numpy.choose(mask, (rgb[j], lut[i][j]))
start = classes[i]+1
# Save the image
gdalnumeric.SaveArray(rgb.astype(gdalnumeric.numpy.uint8), tgt, format="GTIFF",prototype=src)
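
# --- Added example: a short follow-up, assuming you also want the class
# sizes; numpy.histogram returns (counts, edges) and the script above keeps
# only the edges.
counts = gdalnumeric.numpy.histogram(srcArr, bins=2)[0]
print(counts)   # number of pixels per class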
|
normal
|
{
"blob_id": "f29d377e8a8fd6d2e156da665478d7a4c167f7d5",
"index": 3601,
"step-1": "import gdalnumeric\n\n#Input File\nsrc = \"../dati/islands/islands.tif\"\n\n#Output\ntgt = \"../dati/islands/islands_classified.jpg\"\n\nsrcArr = gdalnumeric.LoadFile(src)\n\nclasses = gdalnumeric.numpy.histogram(srcArr,bins=2)[1]\nprint classes\n\n#Color look-up table (LUT) - must be len(classes)+1.\n#Specified as R,G,B tuples\nlut = [[255,0,0],[0,0,0],[255,255,255]]\n\nstart = 1\n\nrgb = gdalnumeric.numpy.zeros((3, srcArr.shape[0], srcArr.shape[1],),gdalnumeric.numpy.float32)\n\n# Process all classes and assign colors\nfor i in range(len(classes)):\n mask = gdalnumeric.numpy.logical_and(start <= srcArr, srcArr <= classes[i])\n for j in range(len(lut[i])):\n rgb[j] = gdalnumeric.numpy.choose(mask, (rgb[j], lut[i][j]))\n start = classes[i]+1\n\n# Save the image\ngdalnumeric.SaveArray(rgb.astype(gdalnumeric.numpy.uint8), tgt, format=\"GTIFF\",prototype=src)",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from django.shortcuts import render
from django.shortcuts import redirect
# Create your views here.
from .forms import (AddBookForm, UpdateBookForm, BookCreateModelForm,
                    SearchForm, RegistrationForm, SignInForm)
from book.models import Books
from django.contrib.auth import authenticate, login, logout
def book_add(request):
if request.user.is_authenticated:
context = {}
if request.method == "GET":
form = BookCreateModelForm()
context["form"] = form
return render(request, "addbook.html", context)
elif request.method == "POST":
context = {}
form = BookCreateModelForm(request.POST)
if form.is_valid():
form.save()
# context["form"] = form
# book_name = form.cleaned_data["book_name"]
# author= form.cleaned_data["author"]
# category=form.cleaned_data["category"]
# prices=form.cleaned_data["price"]
# copies=form.cleaned_data["number_copies"]
# print(book_name,author,category,prices,copies)
# book=Books(book_name=book_name,author=author,category=category,price=prices,copies=copies)
# book.save()
return redirect("index")
else:
return render(request, "addbook.html",context)
else:
return redirect('singn')
def get_books(request):
if request.user.is_authenticated:
form=SearchForm()
context = {}
books=Books.objects.all()
context["books"]=books
context['form']=form
if request.method=="POST":
form=SearchForm(request.POST)
if form.is_valid():
book_name=form.cleaned_data["book_name"]
books=Books.objects.filter(book_name__contains=book_name)
context['books']=books
return render(request,"book_list.html",context)
else:
context['form']=form
return render(request, "book_list.html", context)
return render(request, "book_list.html", context)
else:
return redirect('singn')
def book_details(request,id):
if request.user.is_authenticated:
book=Books.objects.get(id=id)
context = {}
context["book"]=book
return render(request,"book_details.html",context)
else:
return redirect('singn')
def remove_book(request,id):
if request.user.is_authenticated:
book=Books.objects.get(id=id)
book.delete()
return redirect("books")
else:
return redirect('singn')
def update_book(request,id):
if request.user.is_authenticated:
book = Books.objects.get(id=id)
form=BookCreateModelForm(instance=book)
# form=BookCreateModelForm(initial={
# "book_name":book.book_name,
# "author":book.author,
# "category":book.category,
# "price":book.price,
# "number_copies":book.copies})
context = {}
context['form']=form
if request.method=="POST":
book = Books.objects.get(id=id)
form=BookCreateModelForm(instance=book,data=request.POST)
if form.is_valid():
form.save()
# form=BookCreateModelForm(request.POST)
#
# if form.is_valid():
# book.book_name=form.cleaned_data["book_name"]
# book.author=form.cleaned_data["author"]
# book.category=form.cleaned_data["category"]
# book.price=form.cleaned_data["price"]
# book.copies=form.cleaned_data["number_copies"]
# book.save()
return redirect("books")
else:
form=BookCreateModelForm(request.POST)
context["form"]=form
print(form)
return render(request, "edit.html", context)
return render(request,"edit.html",context)
else:
return redirect('singn')
def create_account(request):
form=RegistrationForm()
context={'form':form}
if request.method=="POST":
form=RegistrationForm(request.POST)
if form.is_valid():
form.save()
print("account created")
return redirect("singn")
else:
context["form"]=form
return render(request, "createaccount.html", context)
return render(request,"createaccount.html",context)
def singn_in(request):
form=SignInForm()
context={'form':form}
if request.method=="POST":
form=SignInForm(request.POST)
if form.is_valid():
username=form.cleaned_data["username"]
password=form.cleaned_data["password"]
user=authenticate(request,username=username,password=password)
if user:
login(request,user)
return redirect("index")
else:
context['form']=form
return render(request, "signin.html", context)
return render(request,"signin.html",context)
def signout(request):
if request.user.is_authenticated:
logout(request)
return redirect("singn")
else:
return redirect('singn')
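
# --- Added example: a urls.py sketch consistent with the route names these
# views redirect to ('index', 'books', 'singn'); the paths and the choice of
# view for 'index' are assumptions, shown commented out because they belong
# in urls.py, not in this views module.
# from django.urls import path
# from book import views
#
# urlpatterns = [
#     path('', views.get_books, name='index'),              # assumed landing page
#     path('books/', views.get_books, name='books'),
#     path('books/add/', views.book_add, name='addbook'),
#     path('books/<int:id>/', views.book_details, name='details'),
#     path('books/<int:id>/edit/', views.update_book, name='edit'),
#     path('books/<int:id>/delete/', views.remove_book, name='delete'),
#     path('signup/', views.create_account, name='signup'),
#     path('signin/', views.singn_in, name='singn'),        # spelling kept from the code
#     path('signout/', views.signout, name='signout'),
# ]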
|
normal
|
{
"blob_id": "aba2a0a262c14f286c278f21ba42871410c174f0",
"index": 953,
"step-1": "<mask token>\n\n\ndef book_add(request):\n if request.user.is_authenticated:\n context = {}\n if request.method == 'GET':\n form = BookCreateModelForm()\n context['form'] = form\n return render(request, 'addbook.html', context)\n elif request.method == 'POST':\n context = {}\n form = BookCreateModelForm(request.POST)\n if form.is_valid():\n form.save()\n return redirect('index')\n else:\n return render(request, 'addbook.html', context)\n else:\n return redirect('singn')\n\n\ndef get_books(request):\n if request.user.is_authenticated:\n form = SearchForm()\n context = {}\n books = Books.objects.all()\n context['books'] = books\n context['form'] = form\n if request.method == 'POST':\n form = SearchForm(request.POST)\n if form.is_valid():\n book_name = form.cleaned_data['book_name']\n books = Books.objects.filter(book_name__contains=book_name)\n context['books'] = books\n return render(request, 'book_list.html', context)\n else:\n context['form'] = form\n return render(request, 'book_list.html', context)\n return render(request, 'book_list.html', context)\n else:\n return redirect('singn')\n\n\n<mask token>\n\n\ndef remove_book(request, id):\n if request.user.is_authenticated:\n book = Books.objects.get(id=id)\n book.delete()\n return redirect('books')\n else:\n return redirect('singn')\n\n\n<mask token>\n\n\ndef create_account(request):\n form = RegistrationForm()\n context = {'form': form}\n if request.method == 'POST':\n form = RegistrationForm(request.POST)\n if form.is_valid():\n form.save()\n print('account created')\n return redirect('singn')\n else:\n context['form'] = form\n return render(request, 'createaccount.html', context)\n return render(request, 'createaccount.html', context)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef book_add(request):\n if request.user.is_authenticated:\n context = {}\n if request.method == 'GET':\n form = BookCreateModelForm()\n context['form'] = form\n return render(request, 'addbook.html', context)\n elif request.method == 'POST':\n context = {}\n form = BookCreateModelForm(request.POST)\n if form.is_valid():\n form.save()\n return redirect('index')\n else:\n return render(request, 'addbook.html', context)\n else:\n return redirect('singn')\n\n\ndef get_books(request):\n if request.user.is_authenticated:\n form = SearchForm()\n context = {}\n books = Books.objects.all()\n context['books'] = books\n context['form'] = form\n if request.method == 'POST':\n form = SearchForm(request.POST)\n if form.is_valid():\n book_name = form.cleaned_data['book_name']\n books = Books.objects.filter(book_name__contains=book_name)\n context['books'] = books\n return render(request, 'book_list.html', context)\n else:\n context['form'] = form\n return render(request, 'book_list.html', context)\n return render(request, 'book_list.html', context)\n else:\n return redirect('singn')\n\n\ndef book_details(request, id):\n if request.user.is_authenticated:\n book = Books.objects.get(id=id)\n context = {}\n context['book'] = book\n return render(request, 'book_details.html', context)\n else:\n return redirect('singn')\n\n\ndef remove_book(request, id):\n if request.user.is_authenticated:\n book = Books.objects.get(id=id)\n book.delete()\n return redirect('books')\n else:\n return redirect('singn')\n\n\n<mask token>\n\n\ndef create_account(request):\n form = RegistrationForm()\n context = {'form': form}\n if request.method == 'POST':\n form = RegistrationForm(request.POST)\n if form.is_valid():\n form.save()\n print('account created')\n return redirect('singn')\n else:\n context['form'] = form\n return render(request, 'createaccount.html', context)\n return render(request, 'createaccount.html', context)\n\n\n<mask token>\n\n\ndef signout(request):\n if request.user.is_authenticated:\n logout(request)\n return redirect('singn')\n else:\n return redirect('singn')\n",
"step-3": "<mask token>\n\n\ndef book_add(request):\n if request.user.is_authenticated:\n context = {}\n if request.method == 'GET':\n form = BookCreateModelForm()\n context['form'] = form\n return render(request, 'addbook.html', context)\n elif request.method == 'POST':\n context = {}\n form = BookCreateModelForm(request.POST)\n if form.is_valid():\n form.save()\n return redirect('index')\n else:\n return render(request, 'addbook.html', context)\n else:\n return redirect('singn')\n\n\ndef get_books(request):\n if request.user.is_authenticated:\n form = SearchForm()\n context = {}\n books = Books.objects.all()\n context['books'] = books\n context['form'] = form\n if request.method == 'POST':\n form = SearchForm(request.POST)\n if form.is_valid():\n book_name = form.cleaned_data['book_name']\n books = Books.objects.filter(book_name__contains=book_name)\n context['books'] = books\n return render(request, 'book_list.html', context)\n else:\n context['form'] = form\n return render(request, 'book_list.html', context)\n return render(request, 'book_list.html', context)\n else:\n return redirect('singn')\n\n\ndef book_details(request, id):\n if request.user.is_authenticated:\n book = Books.objects.get(id=id)\n context = {}\n context['book'] = book\n return render(request, 'book_details.html', context)\n else:\n return redirect('singn')\n\n\ndef remove_book(request, id):\n if request.user.is_authenticated:\n book = Books.objects.get(id=id)\n book.delete()\n return redirect('books')\n else:\n return redirect('singn')\n\n\ndef update_book(request, id):\n if request.user.is_authenticated:\n book = Books.objects.get(id=id)\n form = BookCreateModelForm(instance=book)\n context = {}\n context['form'] = form\n if request.method == 'POST':\n book = Books.objects.get(id=id)\n form = BookCreateModelForm(instance=book, data=request.POST)\n if form.is_valid():\n form.save()\n return redirect('books')\n else:\n form = BookCreateModelForm(request.POST)\n context['form'] = form\n print(form)\n return render(request, 'edit.html', context)\n return render(request, 'edit.html', context)\n else:\n return redirect('singn')\n\n\ndef create_account(request):\n form = RegistrationForm()\n context = {'form': form}\n if request.method == 'POST':\n form = RegistrationForm(request.POST)\n if form.is_valid():\n form.save()\n print('account created')\n return redirect('singn')\n else:\n context['form'] = form\n return render(request, 'createaccount.html', context)\n return render(request, 'createaccount.html', context)\n\n\n<mask token>\n\n\ndef signout(request):\n if request.user.is_authenticated:\n logout(request)\n return redirect('singn')\n else:\n return redirect('singn')\n",
"step-4": "<mask token>\n\n\ndef book_add(request):\n if request.user.is_authenticated:\n context = {}\n if request.method == 'GET':\n form = BookCreateModelForm()\n context['form'] = form\n return render(request, 'addbook.html', context)\n elif request.method == 'POST':\n context = {}\n form = BookCreateModelForm(request.POST)\n if form.is_valid():\n form.save()\n return redirect('index')\n else:\n return render(request, 'addbook.html', context)\n else:\n return redirect('singn')\n\n\ndef get_books(request):\n if request.user.is_authenticated:\n form = SearchForm()\n context = {}\n books = Books.objects.all()\n context['books'] = books\n context['form'] = form\n if request.method == 'POST':\n form = SearchForm(request.POST)\n if form.is_valid():\n book_name = form.cleaned_data['book_name']\n books = Books.objects.filter(book_name__contains=book_name)\n context['books'] = books\n return render(request, 'book_list.html', context)\n else:\n context['form'] = form\n return render(request, 'book_list.html', context)\n return render(request, 'book_list.html', context)\n else:\n return redirect('singn')\n\n\ndef book_details(request, id):\n if request.user.is_authenticated:\n book = Books.objects.get(id=id)\n context = {}\n context['book'] = book\n return render(request, 'book_details.html', context)\n else:\n return redirect('singn')\n\n\ndef remove_book(request, id):\n if request.user.is_authenticated:\n book = Books.objects.get(id=id)\n book.delete()\n return redirect('books')\n else:\n return redirect('singn')\n\n\ndef update_book(request, id):\n if request.user.is_authenticated:\n book = Books.objects.get(id=id)\n form = BookCreateModelForm(instance=book)\n context = {}\n context['form'] = form\n if request.method == 'POST':\n book = Books.objects.get(id=id)\n form = BookCreateModelForm(instance=book, data=request.POST)\n if form.is_valid():\n form.save()\n return redirect('books')\n else:\n form = BookCreateModelForm(request.POST)\n context['form'] = form\n print(form)\n return render(request, 'edit.html', context)\n return render(request, 'edit.html', context)\n else:\n return redirect('singn')\n\n\ndef create_account(request):\n form = RegistrationForm()\n context = {'form': form}\n if request.method == 'POST':\n form = RegistrationForm(request.POST)\n if form.is_valid():\n form.save()\n print('account created')\n return redirect('singn')\n else:\n context['form'] = form\n return render(request, 'createaccount.html', context)\n return render(request, 'createaccount.html', context)\n\n\ndef singn_in(request):\n form = SignInForm()\n context = {'form': form}\n if request.method == 'POST':\n form = SignInForm(request.POST)\n if form.is_valid():\n username = form.cleaned_data['username']\n password = form.cleaned_data['password']\n user = authenticate(request, username=username, password=password)\n if user:\n login(request, user)\n return redirect('index')\n else:\n context['form'] = form\n return render(request, 'signin.html', context)\n return render(request, 'signin.html', context)\n\n\ndef signout(request):\n if request.user.is_authenticated:\n logout(request)\n return redirect('singn')\n else:\n return redirect('singn')\n",
"step-5": "from django.shortcuts import render\nfrom django.shortcuts import redirect\n\n\n\n# Create your views here.\nfrom .forms import AddBookForm ,UpdateBookForm,BookCreateModelForm,SearchForm,RegistrationForm,SignInForm\nfrom book.models import Books\nfrom django.contrib.auth import authenticate,login,logout\n\n\n\ndef book_add(request):\n if request.user.is_authenticated:\n context = {}\n if request.method == \"GET\":\n form = BookCreateModelForm()\n context[\"form\"] = form\n return render(request, \"addbook.html\", context)\n elif request.method == \"POST\":\n context = {}\n form = BookCreateModelForm(request.POST)\n \n if form.is_valid():\n form.save()\n # context[\"form\"] = form\n # book_name = form.cleaned_data[\"book_name\"]\n # author= form.cleaned_data[\"author\"]\n # category=form.cleaned_data[\"category\"]\n # prices=form.cleaned_data[\"price\"]\n # copies=form.cleaned_data[\"number_copies\"]\n # print(book_name,author,category,prices,copies)\n # book=Books(book_name=book_name,author=author,category=category,price=prices,copies=copies)\n # book.save()\n return redirect(\"index\")\n else:\n return render(request, \"addbook.html\",context)\n else:\n return redirect('singn')\n\ndef get_books(request):\n if request.user.is_authenticated:\n form=SearchForm()\n context = {}\n books=Books.objects.all()\n context[\"books\"]=books\n context['form']=form\n if request.method==\"POST\":\n form=SearchForm(request.POST)\n if form.is_valid():\n book_name=form.cleaned_data[\"book_name\"]\n books=Books.objects.filter(book_name__contains=book_name)\n context['books']=books\n return render(request,\"book_list.html\",context)\n else:\n context['form']=form\n return render(request, \"book_list.html\", context)\n return render(request, \"book_list.html\", context)\n else:\n return redirect('singn')\n\ndef book_details(request,id):\n if request.user.is_authenticated:\n book=Books.objects.get(id=id)\n context = {}\n context[\"book\"]=book\n return render(request,\"book_details.html\",context)\n else:\n return redirect('singn')\n\n\ndef remove_book(request,id):\n if request.user.is_authenticated:\n book=Books.objects.get(id=id)\n book.delete()\n return redirect(\"books\")\n else:\n return redirect('singn')\ndef update_book(request,id):\n if request.user.is_authenticated:\n book = Books.objects.get(id=id)\n form=BookCreateModelForm(instance=book)\n\n # form=BookCreateModelForm(initial={\n # \"book_name\":book.book_name,\n # \"author\":book.author,\n # \"category\":book.category,\n # \"price\":book.price,\n # \"number_copies\":book.copies})\n context = {}\n context['form']=form\n if request.method==\"POST\":\n book = Books.objects.get(id=id)\n form=BookCreateModelForm(instance=book,data=request.POST)\n if form.is_valid():\n form.save()\n # form=BookCreateModelForm(request.POST)\n #\n # if form.is_valid():\n # book.book_name=form.cleaned_data[\"book_name\"]\n # book.author=form.cleaned_data[\"author\"]\n # book.category=form.cleaned_data[\"category\"]\n # book.price=form.cleaned_data[\"price\"]\n # book.copies=form.cleaned_data[\"number_copies\"]\n # book.save()\n return redirect(\"books\")\n else:\n form=BookCreateModelForm(request.POST)\n context[\"form\"]=form\n print(form)\n return render(request, \"edit.html\", context)\n return render(request,\"edit.html\",context)\n else:\n return redirect('singn')\n\n\ndef create_account(request):\n form=RegistrationForm()\n context={'form':form}\n if request.method==\"POST\":\n form=RegistrationForm(request.POST)\n if form.is_valid():\n form.save()\n 
print(\"account created\")\n return redirect(\"singn\")\n else:\n context[\"form\"]=form\n return render(request, \"createaccount.html\", context)\n\n return render(request,\"createaccount.html\",context)\n\n\ndef singn_in(request):\n form=SignInForm()\n context={'form':form}\n if request.method==\"POST\":\n form=SignInForm(request.POST)\n if form.is_valid():\n username=form.cleaned_data[\"username\"]\n password=form.cleaned_data[\"password\"]\n user=authenticate(request,username=username,password=password)\n if user:\n login(request,user)\n return redirect(\"index\")\n else:\n context['form']=form\n return render(request, \"signin.html\", context)\n\n\n \n return render(request,\"signin.html\",context)\n\n\ndef signout(request):\n if request.user.is_authenticated:\n logout(request)\n return redirect(\"singn\")\n else:\n return redirect('singn')\n\n\n\n\n\n\n\n\n\n\n",
"step-ids": [
4,
6,
7,
8,
10
]
}
|
[
4,
6,
7,
8,
10
] |
import chainer
import chainer.functions as F
import numpy as np
import argparse
from model import Generator, Discriminator
from chainer import cuda, serializers
from pathlib import Path
from utils import set_optimizer
from dataset import DatasetLoader
xp = cuda.cupy
cuda.get_device(0).use()
class CycleGANVC2LossCalculator:
def __init__(self):
pass
@staticmethod
def dis_loss(discriminator, y, t):
y_dis = discriminator(y)
t_dis = discriminator(t)
return F.mean(F.softplus(-t_dis)) + F.mean(F.softplus(y_dis))
@staticmethod
def gen_loss(discriminator, y):
y_dis = discriminator(y)
return F.mean(F.softplus(-y_dis))
@staticmethod
def cycle_loss(y, t):
return 10.0 * F.mean_absolute_error(y, t)
@staticmethod
def identity_loss(y, t):
return 5.0 * F.mean_absolute_error(y, t)
def train(epochs,
iterations,
batchsize,
modeldir,
extension,
time_width,
mel_bins,
sampling_rate,
g_learning_rate,
d_learning_rate,
beta1,
beta2,
identity_epoch,
second_step,
src_path,
tgt_path):
    # Dataset definition
dataset = DatasetLoader(src_path,
tgt_path,
extension,
time_width,
mel_bins,
sampling_rate)
print(dataset)
# Model & Optimizer definition
generator_xy = Generator()
generator_xy.to_gpu()
gen_xy_opt = set_optimizer(generator_xy, g_learning_rate, beta1, beta2)
generator_yx = Generator()
generator_yx.to_gpu()
gen_yx_opt = set_optimizer(generator_yx, g_learning_rate, beta1, beta2)
discriminator_y = Discriminator()
discriminator_y.to_gpu()
dis_y_opt = set_optimizer(discriminator_y, d_learning_rate, beta1, beta2)
discriminator_x = Discriminator()
discriminator_x.to_gpu()
dis_x_opt = set_optimizer(discriminator_x, d_learning_rate, beta1, beta2)
discriminator_xyx = Discriminator()
discriminator_xyx.to_gpu()
dis_xyx_opt = set_optimizer(discriminator_xyx, d_learning_rate, beta1, beta2)
discriminator_yxy = Discriminator()
discriminator_yxy.to_gpu()
dis_yxy_opt = set_optimizer(discriminator_yxy, d_learning_rate, beta1, beta2)
# Loss function definition
lossfunc = CycleGANVC2LossCalculator()
for epoch in range(epochs):
sum_dis_loss = 0
sum_gen_loss = 0
for batch in range(0, iterations, batchsize):
x, y = dataset.train(batchsize)
xy = generator_xy(x)
xyx = generator_yx(xy)
yx = generator_yx(y)
yxy = generator_xy(yx)
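            # Detach the generated samples so the discriminator update below
            # does not backpropagate into the generators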
xy.unchain_backward()
xyx.unchain_backward()
yx.unchain_backward()
yxy.unchain_backward()
dis_loss = lossfunc.dis_loss(discriminator_y, xy, y)
dis_loss += lossfunc.dis_loss(discriminator_x, yx, x)
if second_step:
dis_loss += lossfunc.dis_loss(discriminator_xyx, xyx, x)
dis_loss += lossfunc.dis_loss(discriminator_yxy, yxy, y)
discriminator_xyx.cleargrads()
discriminator_yxy.cleargrads()
discriminator_x.cleargrads()
discriminator_y.cleargrads()
dis_loss.backward()
dis_x_opt.update()
dis_y_opt.update()
if second_step:
dis_xyx_opt.update()
dis_yxy_opt.update()
dis_loss.unchain_backward()
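            # Recompute the forward passes with gradients enabled for the
            # generator update, including the identity mappings id_x and id_y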
xy = generator_xy(x)
xyx = generator_yx(xy)
id_y = generator_xy(y)
yx = generator_yx(y)
yxy = generator_xy(yx)
id_x = generator_yx(x)
gen_loss = lossfunc.gen_loss(discriminator_y, xy)
gen_loss += lossfunc.gen_loss(discriminator_x, yx)
if second_step:
gen_loss += lossfunc.gen_loss(discriminator_yxy, yxy)
gen_loss += lossfunc.gen_loss(discriminator_xyx, xyx)
gen_loss += lossfunc.cycle_loss(x, xyx)
            gen_loss += lossfunc.cycle_loss(y, yxy)
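            # Identity mapping loss is only applied during the first
            # identity_epoch epochs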
if epoch < identity_epoch:
gen_loss += lossfunc.identity_loss(id_y, y)
gen_loss += lossfunc.identity_loss(id_x, x)
generator_xy.cleargrads()
generator_yx.cleargrads()
gen_loss.backward()
gen_xy_opt.update()
gen_yx_opt.update()
gen_loss.unchain_backward()
sum_dis_loss += dis_loss.data
sum_gen_loss += gen_loss.data
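            # Snapshot both generators once at the start of every epoch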
if batch == 0:
serializers.save_npz(f"{modeldir}/generator_xy_{epoch}.model", generator_xy)
serializers.save_npz(f"{modeldir}/generator_yx_{epoch}.model", generator_yx)
print('epoch : {}'.format(epoch))
print('Generator loss : {}'.format(sum_gen_loss / iterations))
print('Discriminator loss : {}'.format(sum_dis_loss / iterations))
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="CycleGANVC2")
parser.add_argument('--e', type=int, default=50, help="the number of epochs")
parser.add_argument('--i', type=int, default=1000, help="the number of iterations")
parser.add_argument('--b', type=int, default=16, help="batch size")
parser.add_argument('--modeldir', type=Path, default="modeldir", help="model output directory")
parser.add_argument('--ext', type=str, default=".npy", help="extension of training data")
parser.add_argument('--tw', type=int, default=128, help="time width of spectral envelope")
parser.add_argument('--mb', type=int, default=36, help="mel bins of spectral envelope")
parser.add_argument('--sr', type=int, default=22050, help="sampling rate of audio data")
parser.add_argument('--glr', type=float, default=0.0002, help="learning rate of Adam on generator")
parser.add_argument('--dlr', type=float, default=0.0001, help="learning rate of Adam on discriminator")
parser.add_argument('--b1', type=float, default=0.5, help="beta1 of Adam")
parser.add_argument('--b2', type=float, default=0.999, help="beta2 of Adam")
    parser.add_argument('--ie', type=int, default=20, help="number of epochs during which identity mapping loss is applied")
    parser.add_argument('--second', action="store_true", help="enable the second step of adversarial loss")
parser.add_argument('--src', type=Path, help="path which includes source data")
parser.add_argument('--tgt', type=Path, help="path which includes target data")
args = parser.parse_args()
modeldir = args.modeldir
modeldir.mkdir(exist_ok=True)
train(args.e, args.i, args.b, modeldir, args.ext, args.tw, args.mb, args.sr,
args.glr, args.dlr, args.b1, args.b2, args.ie, args.second,
args.src, args.tgt)
|
normal
|
{
"blob_id": "32105a245f6945dbe8749140d811b20d634289bc",
"index": 2481,
"step-1": "<mask token>\n\n\nclass CycleGANVC2LossCalculator:\n\n def __init__(self):\n pass\n <mask token>\n\n @staticmethod\n def gen_loss(discriminator, y):\n y_dis = discriminator(y)\n return F.mean(F.softplus(-y_dis))\n <mask token>\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass CycleGANVC2LossCalculator:\n\n def __init__(self):\n pass\n\n @staticmethod\n def dis_loss(discriminator, y, t):\n y_dis = discriminator(y)\n t_dis = discriminator(t)\n return F.mean(F.softplus(-t_dis)) + F.mean(F.softplus(y_dis))\n\n @staticmethod\n def gen_loss(discriminator, y):\n y_dis = discriminator(y)\n return F.mean(F.softplus(-y_dis))\n\n @staticmethod\n def cycle_loss(y, t):\n return 10.0 * F.mean_absolute_error(y, t)\n\n @staticmethod\n def identity_loss(y, t):\n return 5.0 * F.mean_absolute_error(y, t)\n\n\ndef train(epochs, iterations, batchsize, modeldir, extension, time_width,\n mel_bins, sampling_rate, g_learning_rate, d_learning_rate, beta1, beta2,\n identity_epoch, second_step, src_path, tgt_path):\n dataset = DatasetLoader(src_path, tgt_path, extension, time_width,\n mel_bins, sampling_rate)\n print(dataset)\n generator_xy = Generator()\n generator_xy.to_gpu()\n gen_xy_opt = set_optimizer(generator_xy, g_learning_rate, beta1, beta2)\n generator_yx = Generator()\n generator_yx.to_gpu()\n gen_yx_opt = set_optimizer(generator_yx, g_learning_rate, beta1, beta2)\n discriminator_y = Discriminator()\n discriminator_y.to_gpu()\n dis_y_opt = set_optimizer(discriminator_y, d_learning_rate, beta1, beta2)\n discriminator_x = Discriminator()\n discriminator_x.to_gpu()\n dis_x_opt = set_optimizer(discriminator_x, d_learning_rate, beta1, beta2)\n discriminator_xyx = Discriminator()\n discriminator_xyx.to_gpu()\n dis_xyx_opt = set_optimizer(discriminator_xyx, d_learning_rate, beta1,\n beta2)\n discriminator_yxy = Discriminator()\n discriminator_yxy.to_gpu()\n dis_yxy_opt = set_optimizer(discriminator_yxy, d_learning_rate, beta1,\n beta2)\n lossfunc = CycleGANVC2LossCalculator()\n for epoch in range(epochs):\n sum_dis_loss = 0\n sum_gen_loss = 0\n for batch in range(0, iterations, batchsize):\n x, y = dataset.train(batchsize)\n xy = generator_xy(x)\n xyx = generator_yx(xy)\n yx = generator_yx(y)\n yxy = generator_xy(yx)\n xy.unchain_backward()\n xyx.unchain_backward()\n yx.unchain_backward()\n yxy.unchain_backward()\n dis_loss = lossfunc.dis_loss(discriminator_y, xy, y)\n dis_loss += lossfunc.dis_loss(discriminator_x, yx, x)\n if second_step:\n dis_loss += lossfunc.dis_loss(discriminator_xyx, xyx, x)\n dis_loss += lossfunc.dis_loss(discriminator_yxy, yxy, y)\n discriminator_xyx.cleargrads()\n discriminator_yxy.cleargrads()\n discriminator_x.cleargrads()\n discriminator_y.cleargrads()\n dis_loss.backward()\n dis_x_opt.update()\n dis_y_opt.update()\n if second_step:\n dis_xyx_opt.update()\n dis_yxy_opt.update()\n dis_loss.unchain_backward()\n xy = generator_xy(x)\n xyx = generator_yx(xy)\n id_y = generator_xy(y)\n yx = generator_yx(y)\n yxy = generator_xy(yx)\n id_x = generator_yx(x)\n gen_loss = lossfunc.gen_loss(discriminator_y, xy)\n gen_loss += lossfunc.gen_loss(discriminator_x, yx)\n if second_step:\n gen_loss += lossfunc.gen_loss(discriminator_yxy, yxy)\n gen_loss += lossfunc.gen_loss(discriminator_xyx, xyx)\n gen_loss += lossfunc.cycle_loss(x, xyx)\n gen_loss += lossfunc.cycle_loss(y, xyx)\n if epoch < identity_epoch:\n gen_loss += lossfunc.identity_loss(id_y, y)\n gen_loss += lossfunc.identity_loss(id_x, x)\n generator_xy.cleargrads()\n generator_yx.cleargrads()\n gen_loss.backward()\n gen_xy_opt.update()\n gen_yx_opt.update()\n gen_loss.unchain_backward()\n sum_dis_loss += dis_loss.data\n sum_gen_loss += gen_loss.data\n if batch == 0:\n serializers.save_npz(f'{modeldir}/generator_xy_{epoch}.model',\n generator_xy)\n 
serializers.save_npz(f'{modeldir}/generator_yx_{epoch}.model',\n generator_yx)\n print('epoch : {}'.format(epoch))\n print('Generator loss : {}'.format(sum_gen_loss / iterations))\n print('Discriminator loss : {}'.format(sum_dis_loss / iterations))\n\n\n<mask token>\n",
"step-3": "<mask token>\ncuda.get_device(0).use()\n\n\nclass CycleGANVC2LossCalculator:\n\n def __init__(self):\n pass\n\n @staticmethod\n def dis_loss(discriminator, y, t):\n y_dis = discriminator(y)\n t_dis = discriminator(t)\n return F.mean(F.softplus(-t_dis)) + F.mean(F.softplus(y_dis))\n\n @staticmethod\n def gen_loss(discriminator, y):\n y_dis = discriminator(y)\n return F.mean(F.softplus(-y_dis))\n\n @staticmethod\n def cycle_loss(y, t):\n return 10.0 * F.mean_absolute_error(y, t)\n\n @staticmethod\n def identity_loss(y, t):\n return 5.0 * F.mean_absolute_error(y, t)\n\n\ndef train(epochs, iterations, batchsize, modeldir, extension, time_width,\n mel_bins, sampling_rate, g_learning_rate, d_learning_rate, beta1, beta2,\n identity_epoch, second_step, src_path, tgt_path):\n dataset = DatasetLoader(src_path, tgt_path, extension, time_width,\n mel_bins, sampling_rate)\n print(dataset)\n generator_xy = Generator()\n generator_xy.to_gpu()\n gen_xy_opt = set_optimizer(generator_xy, g_learning_rate, beta1, beta2)\n generator_yx = Generator()\n generator_yx.to_gpu()\n gen_yx_opt = set_optimizer(generator_yx, g_learning_rate, beta1, beta2)\n discriminator_y = Discriminator()\n discriminator_y.to_gpu()\n dis_y_opt = set_optimizer(discriminator_y, d_learning_rate, beta1, beta2)\n discriminator_x = Discriminator()\n discriminator_x.to_gpu()\n dis_x_opt = set_optimizer(discriminator_x, d_learning_rate, beta1, beta2)\n discriminator_xyx = Discriminator()\n discriminator_xyx.to_gpu()\n dis_xyx_opt = set_optimizer(discriminator_xyx, d_learning_rate, beta1,\n beta2)\n discriminator_yxy = Discriminator()\n discriminator_yxy.to_gpu()\n dis_yxy_opt = set_optimizer(discriminator_yxy, d_learning_rate, beta1,\n beta2)\n lossfunc = CycleGANVC2LossCalculator()\n for epoch in range(epochs):\n sum_dis_loss = 0\n sum_gen_loss = 0\n for batch in range(0, iterations, batchsize):\n x, y = dataset.train(batchsize)\n xy = generator_xy(x)\n xyx = generator_yx(xy)\n yx = generator_yx(y)\n yxy = generator_xy(yx)\n xy.unchain_backward()\n xyx.unchain_backward()\n yx.unchain_backward()\n yxy.unchain_backward()\n dis_loss = lossfunc.dis_loss(discriminator_y, xy, y)\n dis_loss += lossfunc.dis_loss(discriminator_x, yx, x)\n if second_step:\n dis_loss += lossfunc.dis_loss(discriminator_xyx, xyx, x)\n dis_loss += lossfunc.dis_loss(discriminator_yxy, yxy, y)\n discriminator_xyx.cleargrads()\n discriminator_yxy.cleargrads()\n discriminator_x.cleargrads()\n discriminator_y.cleargrads()\n dis_loss.backward()\n dis_x_opt.update()\n dis_y_opt.update()\n if second_step:\n dis_xyx_opt.update()\n dis_yxy_opt.update()\n dis_loss.unchain_backward()\n xy = generator_xy(x)\n xyx = generator_yx(xy)\n id_y = generator_xy(y)\n yx = generator_yx(y)\n yxy = generator_xy(yx)\n id_x = generator_yx(x)\n gen_loss = lossfunc.gen_loss(discriminator_y, xy)\n gen_loss += lossfunc.gen_loss(discriminator_x, yx)\n if second_step:\n gen_loss += lossfunc.gen_loss(discriminator_yxy, yxy)\n gen_loss += lossfunc.gen_loss(discriminator_xyx, xyx)\n gen_loss += lossfunc.cycle_loss(x, xyx)\n gen_loss += lossfunc.cycle_loss(y, xyx)\n if epoch < identity_epoch:\n gen_loss += lossfunc.identity_loss(id_y, y)\n gen_loss += lossfunc.identity_loss(id_x, x)\n generator_xy.cleargrads()\n generator_yx.cleargrads()\n gen_loss.backward()\n gen_xy_opt.update()\n gen_yx_opt.update()\n gen_loss.unchain_backward()\n sum_dis_loss += dis_loss.data\n sum_gen_loss += gen_loss.data\n if batch == 0:\n serializers.save_npz(f'{modeldir}/generator_xy_{epoch}.model',\n generator_xy)\n 
serializers.save_npz(f'{modeldir}/generator_yx_{epoch}.model',\n generator_yx)\n print('epoch : {}'.format(epoch))\n print('Generator loss : {}'.format(sum_gen_loss / iterations))\n print('Discriminator loss : {}'.format(sum_dis_loss / iterations))\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description='StarGANVC2')\n parser.add_argument('--e', type=int, default=50, help=\n 'the number of epochs')\n parser.add_argument('--i', type=int, default=1000, help=\n 'the number of iterations')\n parser.add_argument('--b', type=int, default=16, help='batch size')\n parser.add_argument('--modeldir', type=Path, default='modeldir', help=\n 'model output directory')\n parser.add_argument('--ext', type=str, default='.npy', help=\n 'extension of training data')\n parser.add_argument('--tw', type=int, default=128, help=\n 'time width of spectral envelope')\n parser.add_argument('--mb', type=int, default=36, help=\n 'mel bins of spectral envelope')\n parser.add_argument('--sr', type=int, default=22050, help=\n 'sampling rate of audio data')\n parser.add_argument('--glr', type=float, default=0.0002, help=\n 'learning rate of Adam on generator')\n parser.add_argument('--dlr', type=float, default=0.0001, help=\n 'learning rate of Adam on discriminator')\n parser.add_argument('--b1', type=float, default=0.5, help='beta1 of Adam')\n parser.add_argument('--b2', type=float, default=0.999, help='beta2 of Adam'\n )\n parser.add_argument('--ie', type=int, default=20, help=\n 'time spans enabling identity mapping loss')\n parser.add_argument('--second', action='store_true', help=\n 'enabling second step of adversaria loss')\n parser.add_argument('--src', type=Path, help=\n 'path which includes source data')\n parser.add_argument('--tgt', type=Path, help=\n 'path which includes target data')\n args = parser.parse_args()\n modeldir = args.modeldir\n modeldir.mkdir(exist_ok=True)\n train(args.e, args.i, args.b, modeldir, args.ext, args.tw, args.mb,\n args.sr, args.glr, args.dlr, args.b1, args.b2, args.ie, args.second,\n args.src, args.tgt)\n",
"step-4": "<mask token>\nxp = cuda.cupy\ncuda.get_device(0).use()\n\n\nclass CycleGANVC2LossCalculator:\n\n def __init__(self):\n pass\n\n @staticmethod\n def dis_loss(discriminator, y, t):\n y_dis = discriminator(y)\n t_dis = discriminator(t)\n return F.mean(F.softplus(-t_dis)) + F.mean(F.softplus(y_dis))\n\n @staticmethod\n def gen_loss(discriminator, y):\n y_dis = discriminator(y)\n return F.mean(F.softplus(-y_dis))\n\n @staticmethod\n def cycle_loss(y, t):\n return 10.0 * F.mean_absolute_error(y, t)\n\n @staticmethod\n def identity_loss(y, t):\n return 5.0 * F.mean_absolute_error(y, t)\n\n\ndef train(epochs, iterations, batchsize, modeldir, extension, time_width,\n mel_bins, sampling_rate, g_learning_rate, d_learning_rate, beta1, beta2,\n identity_epoch, second_step, src_path, tgt_path):\n dataset = DatasetLoader(src_path, tgt_path, extension, time_width,\n mel_bins, sampling_rate)\n print(dataset)\n generator_xy = Generator()\n generator_xy.to_gpu()\n gen_xy_opt = set_optimizer(generator_xy, g_learning_rate, beta1, beta2)\n generator_yx = Generator()\n generator_yx.to_gpu()\n gen_yx_opt = set_optimizer(generator_yx, g_learning_rate, beta1, beta2)\n discriminator_y = Discriminator()\n discriminator_y.to_gpu()\n dis_y_opt = set_optimizer(discriminator_y, d_learning_rate, beta1, beta2)\n discriminator_x = Discriminator()\n discriminator_x.to_gpu()\n dis_x_opt = set_optimizer(discriminator_x, d_learning_rate, beta1, beta2)\n discriminator_xyx = Discriminator()\n discriminator_xyx.to_gpu()\n dis_xyx_opt = set_optimizer(discriminator_xyx, d_learning_rate, beta1,\n beta2)\n discriminator_yxy = Discriminator()\n discriminator_yxy.to_gpu()\n dis_yxy_opt = set_optimizer(discriminator_yxy, d_learning_rate, beta1,\n beta2)\n lossfunc = CycleGANVC2LossCalculator()\n for epoch in range(epochs):\n sum_dis_loss = 0\n sum_gen_loss = 0\n for batch in range(0, iterations, batchsize):\n x, y = dataset.train(batchsize)\n xy = generator_xy(x)\n xyx = generator_yx(xy)\n yx = generator_yx(y)\n yxy = generator_xy(yx)\n xy.unchain_backward()\n xyx.unchain_backward()\n yx.unchain_backward()\n yxy.unchain_backward()\n dis_loss = lossfunc.dis_loss(discriminator_y, xy, y)\n dis_loss += lossfunc.dis_loss(discriminator_x, yx, x)\n if second_step:\n dis_loss += lossfunc.dis_loss(discriminator_xyx, xyx, x)\n dis_loss += lossfunc.dis_loss(discriminator_yxy, yxy, y)\n discriminator_xyx.cleargrads()\n discriminator_yxy.cleargrads()\n discriminator_x.cleargrads()\n discriminator_y.cleargrads()\n dis_loss.backward()\n dis_x_opt.update()\n dis_y_opt.update()\n if second_step:\n dis_xyx_opt.update()\n dis_yxy_opt.update()\n dis_loss.unchain_backward()\n xy = generator_xy(x)\n xyx = generator_yx(xy)\n id_y = generator_xy(y)\n yx = generator_yx(y)\n yxy = generator_xy(yx)\n id_x = generator_yx(x)\n gen_loss = lossfunc.gen_loss(discriminator_y, xy)\n gen_loss += lossfunc.gen_loss(discriminator_x, yx)\n if second_step:\n gen_loss += lossfunc.gen_loss(discriminator_yxy, yxy)\n gen_loss += lossfunc.gen_loss(discriminator_xyx, xyx)\n gen_loss += lossfunc.cycle_loss(x, xyx)\n gen_loss += lossfunc.cycle_loss(y, xyx)\n if epoch < identity_epoch:\n gen_loss += lossfunc.identity_loss(id_y, y)\n gen_loss += lossfunc.identity_loss(id_x, x)\n generator_xy.cleargrads()\n generator_yx.cleargrads()\n gen_loss.backward()\n gen_xy_opt.update()\n gen_yx_opt.update()\n gen_loss.unchain_backward()\n sum_dis_loss += dis_loss.data\n sum_gen_loss += gen_loss.data\n if batch == 0:\n serializers.save_npz(f'{modeldir}/generator_xy_{epoch}.model',\n 
generator_xy)\n serializers.save_npz(f'{modeldir}/generator_yx_{epoch}.model',\n generator_yx)\n print('epoch : {}'.format(epoch))\n print('Generator loss : {}'.format(sum_gen_loss / iterations))\n print('Discriminator loss : {}'.format(sum_dis_loss / iterations))\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description='StarGANVC2')\n parser.add_argument('--e', type=int, default=50, help=\n 'the number of epochs')\n parser.add_argument('--i', type=int, default=1000, help=\n 'the number of iterations')\n parser.add_argument('--b', type=int, default=16, help='batch size')\n parser.add_argument('--modeldir', type=Path, default='modeldir', help=\n 'model output directory')\n parser.add_argument('--ext', type=str, default='.npy', help=\n 'extension of training data')\n parser.add_argument('--tw', type=int, default=128, help=\n 'time width of spectral envelope')\n parser.add_argument('--mb', type=int, default=36, help=\n 'mel bins of spectral envelope')\n parser.add_argument('--sr', type=int, default=22050, help=\n 'sampling rate of audio data')\n parser.add_argument('--glr', type=float, default=0.0002, help=\n 'learning rate of Adam on generator')\n parser.add_argument('--dlr', type=float, default=0.0001, help=\n 'learning rate of Adam on discriminator')\n parser.add_argument('--b1', type=float, default=0.5, help='beta1 of Adam')\n parser.add_argument('--b2', type=float, default=0.999, help='beta2 of Adam'\n )\n parser.add_argument('--ie', type=int, default=20, help=\n 'time spans enabling identity mapping loss')\n parser.add_argument('--second', action='store_true', help=\n 'enabling second step of adversaria loss')\n parser.add_argument('--src', type=Path, help=\n 'path which includes source data')\n parser.add_argument('--tgt', type=Path, help=\n 'path which includes target data')\n args = parser.parse_args()\n modeldir = args.modeldir\n modeldir.mkdir(exist_ok=True)\n train(args.e, args.i, args.b, modeldir, args.ext, args.tw, args.mb,\n args.sr, args.glr, args.dlr, args.b1, args.b2, args.ie, args.second,\n args.src, args.tgt)\n",
"step-5": "import chainer\nimport chainer.functions as F\nimport numpy as np\nimport argparse\n\nfrom model import Generator, Discriminator\nfrom chainer import cuda, serializers\nfrom pathlib import Path\nfrom utils import set_optimizer\nfrom dataset import DatasetLoader\n\nxp = cuda.cupy\ncuda.get_device(0).use()\n\n\nclass CycleGANVC2LossCalculator:\n def __init__(self):\n pass\n\n @staticmethod\n def dis_loss(discriminator, y, t):\n y_dis = discriminator(y)\n t_dis = discriminator(t)\n\n return F.mean(F.softplus(-t_dis)) + F.mean(F.softplus(y_dis))\n\n @staticmethod\n def gen_loss(discriminator, y):\n y_dis = discriminator(y)\n\n return F.mean(F.softplus(-y_dis))\n\n @staticmethod\n def cycle_loss(y, t):\n return 10.0 * F.mean_absolute_error(y, t)\n\n @staticmethod\n def identity_loss(y, t):\n return 5.0 * F.mean_absolute_error(y, t)\n\n\ndef train(epochs,\n iterations,\n batchsize,\n modeldir,\n extension,\n time_width,\n mel_bins,\n sampling_rate,\n g_learning_rate,\n d_learning_rate,\n beta1,\n beta2,\n identity_epoch,\n second_step,\n src_path,\n tgt_path):\n\n # Dataset definiton\n dataset = DatasetLoader(src_path,\n tgt_path,\n extension,\n time_width,\n mel_bins,\n sampling_rate)\n print(dataset)\n\n # Model & Optimizer definition\n generator_xy = Generator()\n generator_xy.to_gpu()\n gen_xy_opt = set_optimizer(generator_xy, g_learning_rate, beta1, beta2)\n\n generator_yx = Generator()\n generator_yx.to_gpu()\n gen_yx_opt = set_optimizer(generator_yx, g_learning_rate, beta1, beta2)\n\n discriminator_y = Discriminator()\n discriminator_y.to_gpu()\n dis_y_opt = set_optimizer(discriminator_y, d_learning_rate, beta1, beta2)\n\n discriminator_x = Discriminator()\n discriminator_x.to_gpu()\n dis_x_opt = set_optimizer(discriminator_x, d_learning_rate, beta1, beta2)\n\n discriminator_xyx = Discriminator()\n discriminator_xyx.to_gpu()\n dis_xyx_opt = set_optimizer(discriminator_xyx, d_learning_rate, beta1, beta2)\n\n discriminator_yxy = Discriminator()\n discriminator_yxy.to_gpu()\n dis_yxy_opt = set_optimizer(discriminator_yxy, d_learning_rate, beta1, beta2)\n\n # Loss function definition\n lossfunc = CycleGANVC2LossCalculator()\n\n for epoch in range(epochs):\n sum_dis_loss = 0\n sum_gen_loss = 0\n\n for batch in range(0, iterations, batchsize):\n x, y = dataset.train(batchsize)\n\n xy = generator_xy(x)\n xyx = generator_yx(xy)\n\n yx = generator_yx(y)\n yxy = generator_xy(yx)\n\n xy.unchain_backward()\n xyx.unchain_backward()\n yx.unchain_backward()\n yxy.unchain_backward()\n\n dis_loss = lossfunc.dis_loss(discriminator_y, xy, y)\n dis_loss += lossfunc.dis_loss(discriminator_x, yx, x)\n\n if second_step:\n dis_loss += lossfunc.dis_loss(discriminator_xyx, xyx, x)\n dis_loss += lossfunc.dis_loss(discriminator_yxy, yxy, y)\n\n discriminator_xyx.cleargrads()\n discriminator_yxy.cleargrads()\n\n discriminator_x.cleargrads()\n discriminator_y.cleargrads()\n\n dis_loss.backward()\n dis_x_opt.update()\n dis_y_opt.update()\n\n if second_step:\n dis_xyx_opt.update()\n dis_yxy_opt.update()\n\n dis_loss.unchain_backward()\n\n xy = generator_xy(x)\n xyx = generator_yx(xy)\n id_y = generator_xy(y)\n\n yx = generator_yx(y)\n yxy = generator_xy(yx)\n id_x = generator_yx(x)\n\n gen_loss = lossfunc.gen_loss(discriminator_y, xy)\n gen_loss += lossfunc.gen_loss(discriminator_x, yx)\n\n if second_step:\n gen_loss += lossfunc.gen_loss(discriminator_yxy, yxy)\n gen_loss += lossfunc.gen_loss(discriminator_xyx, xyx)\n\n gen_loss += lossfunc.cycle_loss(x, xyx)\n gen_loss += lossfunc.cycle_loss(y, xyx)\n\n if 
epoch < identity_epoch:\n gen_loss += lossfunc.identity_loss(id_y, y)\n gen_loss += lossfunc.identity_loss(id_x, x)\n\n generator_xy.cleargrads()\n generator_yx.cleargrads()\n gen_loss.backward()\n gen_xy_opt.update()\n gen_yx_opt.update()\n gen_loss.unchain_backward()\n\n sum_dis_loss += dis_loss.data\n sum_gen_loss += gen_loss.data\n\n if batch == 0:\n serializers.save_npz(f\"{modeldir}/generator_xy_{epoch}.model\", generator_xy)\n serializers.save_npz(f\"{modeldir}/generator_yx_{epoch}.model\", generator_yx)\n\n print('epoch : {}'.format(epoch))\n print('Generator loss : {}'.format(sum_gen_loss / iterations))\n print('Discriminator loss : {}'.format(sum_dis_loss / iterations))\n\n\nif __name__ == \"__main__\":\n parser = argparse.ArgumentParser(description=\"StarGANVC2\")\n parser.add_argument('--e', type=int, default=50, help=\"the number of epochs\")\n parser.add_argument('--i', type=int, default=1000, help=\"the number of iterations\")\n parser.add_argument('--b', type=int, default=16, help=\"batch size\")\n parser.add_argument('--modeldir', type=Path, default=\"modeldir\", help=\"model output directory\")\n parser.add_argument('--ext', type=str, default=\".npy\", help=\"extension of training data\")\n parser.add_argument('--tw', type=int, default=128, help=\"time width of spectral envelope\")\n parser.add_argument('--mb', type=int, default=36, help=\"mel bins of spectral envelope\")\n parser.add_argument('--sr', type=int, default=22050, help=\"sampling rate of audio data\")\n parser.add_argument('--glr', type=float, default=0.0002, help=\"learning rate of Adam on generator\")\n parser.add_argument('--dlr', type=float, default=0.0001, help=\"learning rate of Adam on discriminator\")\n parser.add_argument('--b1', type=float, default=0.5, help=\"beta1 of Adam\")\n parser.add_argument('--b2', type=float, default=0.999, help=\"beta2 of Adam\")\n parser.add_argument('--ie', type=int, default=20, help=\"time spans enabling identity mapping loss\")\n parser.add_argument('--second', action=\"store_true\", help=\"enabling second step of adversaria loss\")\n parser.add_argument('--src', type=Path, help=\"path which includes source data\")\n parser.add_argument('--tgt', type=Path, help=\"path which includes target data\")\n args = parser.parse_args()\n\n modeldir = args.modeldir\n modeldir.mkdir(exist_ok=True)\n\n train(args.e, args.i, args.b, modeldir, args.ext, args.tw, args.mb, args.sr,\n args.glr, args.dlr, args.b1, args.b2, args.ie, args.second,\n args.src, args.tgt)\n",
"step-ids": [
3,
7,
8,
9,
11
]
}
|
[
3,
7,
8,
9,
11
] |
# -- coding: utf-8 --
from django.conf.urls import url
from myapp.view import views
from myapp.view import story
from myapp.view import img  # added
from myapp.view import login
from myapp.view import tuling
from myapp.view import utilView
from myapp.view.wechat import wechat_modules
from myapp.view import router
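# Patterns are matched top to bottom; the empty pattern at the end acts as a
# catch-all for requests that matched none of the routes above.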
urlpatterns = [
url(r'get_img_api$', router.get_img_api),
    url(r'add_book$', views.add_book),
    url(r'show_books$', views.show_books),
url(r'add_story$', story.add_story),
url(r'show_storys$', story.show_storys),
url(r'add_comment$', story.add_comment),
url(r'show_comments$', story.show_comments),
url(r'uploadImg$', img.uploadImg),
url(r'showImg$', img.showImg),
url(r'uploadImgForUs$', img.uploadImgForUs),
url(r'showImgForUs', img.showImgForUs),
url(r'add_user', login.add_user),
url(r'login', login.login),
url(r'get_username', login.get_username),
url(r'send_register_email', login.send_register_email),
url(r'check_username', login.check_username),
url(r'chat_with_tuling', tuling.chat_with_tuling),
url(r'utilView_getLive2d', utilView.get_live2d),
url(r'utilView_getRandJson', utilView.get_rand_json),
url(r'get_wechat', wechat_modules.on_get),
url(r'', login.other_request),
]
|
normal
|
{
"blob_id": "373c102018fdcc5211263304c368c2e8beef3257",
"index": 720,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [url('get_img_api$', router.get_img_api), url('add_book$',\n views.add_book), url('show_books$', views.show_books), url('add_story$',\n story.add_story), url('show_storys$', story.show_storys), url(\n 'add_comment$', story.add_comment), url('show_comments$', story.\n show_comments), url('uploadImg$', img.uploadImg), url('showImg$', img.\n showImg), url('uploadImgForUs$', img.uploadImgForUs), url(\n 'showImgForUs', img.showImgForUs), url('add_user', login.add_user), url\n ('login', login.login), url('get_username', login.get_username), url(\n 'send_register_email', login.send_register_email), url('check_username',\n login.check_username), url('chat_with_tuling', tuling.chat_with_tuling),\n url('utilView_getLive2d', utilView.get_live2d), url(\n 'utilView_getRandJson', utilView.get_rand_json), url('get_wechat',\n wechat_modules.on_get), url('', login.other_request)]\n",
"step-3": "from django.conf.urls import url\nfrom myapp.view import views\nfrom myapp.view import story\nfrom myapp.view import img\nfrom myapp.view import login\nfrom myapp.view import tuling\nfrom myapp.view import utilView\nfrom myapp.view.wechat import wechat_modules\nfrom myapp.view import router\nurlpatterns = [url('get_img_api$', router.get_img_api), url('add_book$',\n views.add_book), url('show_books$', views.show_books), url('add_story$',\n story.add_story), url('show_storys$', story.show_storys), url(\n 'add_comment$', story.add_comment), url('show_comments$', story.\n show_comments), url('uploadImg$', img.uploadImg), url('showImg$', img.\n showImg), url('uploadImgForUs$', img.uploadImgForUs), url(\n 'showImgForUs', img.showImgForUs), url('add_user', login.add_user), url\n ('login', login.login), url('get_username', login.get_username), url(\n 'send_register_email', login.send_register_email), url('check_username',\n login.check_username), url('chat_with_tuling', tuling.chat_with_tuling),\n url('utilView_getLive2d', utilView.get_live2d), url(\n 'utilView_getRandJson', utilView.get_rand_json), url('get_wechat',\n wechat_modules.on_get), url('', login.other_request)]\n",
"step-4": "# -- coding: utf-8 --\nfrom django.conf.urls import url\nfrom myapp.view import views\nfrom myapp.view import story\nfrom myapp.view import img # 添加\nfrom myapp.view import login\nfrom myapp.view import tuling\nfrom myapp.view import utilView\nfrom myapp.view.wechat import wechat_modules\nfrom myapp.view import router\n\nurlpatterns = [\n url(r'get_img_api$', router.get_img_api),\n url(r'add_book$', views.add_book, ),\n url(r'show_books$', views.show_books, ),\n\n url(r'add_story$', story.add_story),\n url(r'show_storys$', story.show_storys),\n\n url(r'add_comment$', story.add_comment),\n url(r'show_comments$', story.show_comments),\n\n url(r'uploadImg$', img.uploadImg),\n url(r'showImg$', img.showImg),\n url(r'uploadImgForUs$', img.uploadImgForUs),\n url(r'showImgForUs', img.showImgForUs),\n\n url(r'add_user', login.add_user),\n url(r'login', login.login),\n url(r'get_username', login.get_username),\n url(r'send_register_email', login.send_register_email),\n url(r'check_username', login.check_username),\n\n url(r'chat_with_tuling', tuling.chat_with_tuling),\n url(r'utilView_getLive2d', utilView.get_live2d),\n url(r'utilView_getRandJson', utilView.get_rand_json),\n\n url(r'get_wechat', wechat_modules.on_get),\n\n url(r'', login.other_request),\n]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/env python
"""
Otsu's method for automatic estimation of the threshold value T
 - assumes a bimodal grayscale histogram and searches for the optimal separation
 - also compares the result against the Riddler-Calvard iterative threshold
Parameters
    -i / --image : path to the input image

Example
    $ python <scriptname>.py --image ../img/<filename>.png
"""
import numpy as np
import argparse
import mahotas
import cv2
def main():
ap = argparse.ArgumentParser()
ap.add_argument("-i", "--image", required=True, help="Path to the image")
args = vars(ap.parse_args())
image = cv2.imread(args["image"])
    # Preprocessing: convert to grayscale and blur to suppress histogram noise
image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
blurred = cv2.GaussianBlur(image, (5,5), 0)
cv2.imshow("Image", image)
    # Otsu: choose T that minimizes the within-class variance of the histogram
T = mahotas.thresholding.otsu(blurred)
print("[INFO] Otsu's threshold {}".format(T))
thresh = image.copy()
thresh[thresh > T] = 255
thresh[thresh < 255] = 0
thresh = cv2.bitwise_not(thresh)
cv2.imshow("Otsu", thresh)
    # Riddler-Calvard: iterate T until it converges to the mean of the two class means
T = mahotas.thresholding.rc(blurred)
print("[INFO] Riddler-Calvard: {}".format(T))
thresh = image.copy()
thresh[thresh > T] = 255
thresh[thresh < 255] = 0
thresh = cv2.bitwise_not(thresh)
cv2.imshow("Riddler-Calvard", thresh)
cv2.waitKey(0)
if __name__ == "__main__":
main()
|
normal
|
{
"blob_id": "0547751af7bbac42351476dde591d13d40fb37eb",
"index": 7811,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n ap = argparse.ArgumentParser()\n ap.add_argument('-i', '--image', required=True, help='Path to the image')\n args = vars(ap.parse_args())\n image = cv2.imread(args['image'])\n image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)\n blurred = cv2.GaussianBlur(image, (5, 5), 0)\n cv2.imshow('Image', image)\n T = mahotas.thresholding.otsu(blurred)\n print(\"[INFO] Otsu's threshold {}\".format(T))\n thresh = image.copy()\n thresh[thresh > T] = 255\n thresh[thresh < 255] = 0\n thresh = cv2.bitwise_not(thresh)\n cv2.imshow('Otsu', thresh)\n T = mahotas.thresholding.rc(blurred)\n print('[INFO] Riddler-Calvard: {}'.format(T))\n thresh = image.copy()\n thresh[thresh > T] = 255\n thresh[thresh < 255] = 0\n thresh = cv2.bitwise_not(thresh)\n cv2.imshow('Riddler-Calvard', thresh)\n cv2.waitKey(0)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main():\n ap = argparse.ArgumentParser()\n ap.add_argument('-i', '--image', required=True, help='Path to the image')\n args = vars(ap.parse_args())\n image = cv2.imread(args['image'])\n image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)\n blurred = cv2.GaussianBlur(image, (5, 5), 0)\n cv2.imshow('Image', image)\n T = mahotas.thresholding.otsu(blurred)\n print(\"[INFO] Otsu's threshold {}\".format(T))\n thresh = image.copy()\n thresh[thresh > T] = 255\n thresh[thresh < 255] = 0\n thresh = cv2.bitwise_not(thresh)\n cv2.imshow('Otsu', thresh)\n T = mahotas.thresholding.rc(blurred)\n print('[INFO] Riddler-Calvard: {}'.format(T))\n thresh = image.copy()\n thresh[thresh > T] = 255\n thresh[thresh < 255] = 0\n thresh = cv2.bitwise_not(thresh)\n cv2.imshow('Riddler-Calvard', thresh)\n cv2.waitKey(0)\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "<mask token>\nimport numpy as np\nimport argparse\nimport mahotas\nimport cv2\nfrom numpy.matrixlib.defmatrix import matrix\n\n\ndef main():\n ap = argparse.ArgumentParser()\n ap.add_argument('-i', '--image', required=True, help='Path to the image')\n args = vars(ap.parse_args())\n image = cv2.imread(args['image'])\n image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)\n blurred = cv2.GaussianBlur(image, (5, 5), 0)\n cv2.imshow('Image', image)\n T = mahotas.thresholding.otsu(blurred)\n print(\"[INFO] Otsu's threshold {}\".format(T))\n thresh = image.copy()\n thresh[thresh > T] = 255\n thresh[thresh < 255] = 0\n thresh = cv2.bitwise_not(thresh)\n cv2.imshow('Otsu', thresh)\n T = mahotas.thresholding.rc(blurred)\n print('[INFO] Riddler-Calvard: {}'.format(T))\n thresh = image.copy()\n thresh[thresh > T] = 255\n thresh[thresh < 255] = 0\n thresh = cv2.bitwise_not(thresh)\n cv2.imshow('Riddler-Calvard', thresh)\n cv2.waitKey(0)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "#!/usr/bin/env python\n\"\"\"\nOtsu method for automatic estimation of $T$ threshold value\n - assumes two maxima of grayscale histogram & searches for optimal separation\n\nParameters\n\nUsage\n\nExample\n $ python <scriptname>.py --image ../img/<filename>.png\n\n## Explain\n\n\"\"\"\nimport numpy as np\nimport argparse\nimport mahotas\nimport cv2\nfrom numpy.matrixlib.defmatrix import matrix\n\ndef main():\n ap = argparse.ArgumentParser()\n ap.add_argument(\"-i\", \"--image\", required=True, help=\"Path to the image\")\n args = vars(ap.parse_args())\n\n image = cv2.imread(args[\"image\"])\n #preprocessing\n image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)\n blurred = cv2.GaussianBlur(image, (5,5), 0)\n cv2.imshow(\"Image\", image)\n\n # Otsu\n T = mahotas.thresholding.otsu(blurred)\n print(\"[INFO] Otsu's threshold {}\".format(T))\n\n thresh = image.copy()\n thresh[thresh > T] = 255\n thresh[thresh < 255] = 0\n thresh = cv2.bitwise_not(thresh)\n cv2.imshow(\"Otsu\", thresh)\n\n # Riddler-Calvard\n T = mahotas.thresholding.rc(blurred)\n print(\"[INFO] Riddler-Calvard: {}\".format(T))\n thresh = image.copy()\n thresh[thresh > T] = 255\n thresh[thresh < 255] = 0\n thresh = cv2.bitwise_not(thresh)\n cv2.imshow(\"Riddler-Calvard\", thresh)\n\n cv2.waitKey(0)\n\nif __name__==\"__main__\":\n main()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# -*- coding: utf-8 -*-
from flask import jsonify
from flask.views import MethodView
class Users(MethodView):
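    """User resource: Flask's MethodView dispatches each HTTP verb to the method of the same name."""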
def get(self):
return jsonify(
{
'status': 'OK',
'users': [
                    {'name': 'Pepe', 'age': 35, 'occupation': "Engineer"},
                    {'name': 'Bob', 'age': 20, 'occupation': "Student"}
]
}
)
def post(self):
# create user
pass
def put(self):
# update user
pass
def delete(self):
# delete user
pass
|
normal
|
{
"blob_id": "781ce153d5053078ee11cecc13d055a67999a651",
"index": 3800,
"step-1": "<mask token>\n\n\nclass Users(MethodView):\n <mask token>\n <mask token>\n\n def put(self):\n pass\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Users(MethodView):\n\n def get(self):\n return jsonify({'status': 'OK', 'users': [{'name': 'Pepe', 'age': \n 35, 'ocupation': 'Engineer'}, {'name': 'Bob', 'age': 20,\n 'ocupation': 'Student'}]})\n\n def post(self):\n pass\n\n def put(self):\n pass\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Users(MethodView):\n\n def get(self):\n return jsonify({'status': 'OK', 'users': [{'name': 'Pepe', 'age': \n 35, 'ocupation': 'Engineer'}, {'name': 'Bob', 'age': 20,\n 'ocupation': 'Student'}]})\n\n def post(self):\n pass\n\n def put(self):\n pass\n\n def delete(self):\n pass\n",
"step-4": "from flask import jsonify\nfrom flask.views import MethodView\n\n\nclass Users(MethodView):\n\n def get(self):\n return jsonify({'status': 'OK', 'users': [{'name': 'Pepe', 'age': \n 35, 'ocupation': 'Engineer'}, {'name': 'Bob', 'age': 20,\n 'ocupation': 'Student'}]})\n\n def post(self):\n pass\n\n def put(self):\n pass\n\n def delete(self):\n pass\n",
"step-5": "# -*- coding: utf-8 -*-\nfrom flask import jsonify\nfrom flask.views import MethodView\n\n\nclass Users(MethodView):\n\n def get(self):\n return jsonify(\n {\n 'status': 'OK',\n 'users': [\n {'name': 'Pepe', 'age': 35, 'ocupation': \"Engineer\"},\n {'name': 'Bob', 'age': 20, 'ocupation': \"Student\"}\n ]\n }\n )\n\n def post(self):\n # create user\n pass\n\n def put(self):\n # update user\n pass\n\n def delete(self):\n # delete user\n pass\n",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
from utils import *
EvinceRelation("different from")
|
normal
|
{
"blob_id": "4f15e2743b33e2f672cd258172da852edb7e4118",
"index": 2103,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nEvinceRelation('different from')\n",
"step-3": "from utils import *\nEvinceRelation('different from')\n",
"step-4": "from utils import *\n\nEvinceRelation(\"different from\")\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
from simple_avk.AVK import SimpleAVK
from simple_avk.exceptions import MethodError, LongpollError
|
flexible
|
{
"blob_id": "2bccfba2448059a41185b117b224813e344b50f8",
"index": 5673,
"step-1": "<mask token>\n",
"step-2": "from simple_avk.AVK import SimpleAVK\nfrom simple_avk.exceptions import MethodError, LongpollError\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
cursor.execute('SET NAMES utf8;')
cursor.execute('SET CHARACTER SET utf8;')
cursor.execute('SET character_set_connection=utf8;')
<|reserved_special_token_0|>
cursor.execute(sql)
<|reserved_special_token_0|>
for row in results:
user = dict(zip(column_names, row))
print(user['id'])
exchange = ccxt.binance({'apiKey': user['apikey'], 'secret': user[
'secret'], 'enableRateLimit': True})
if exchange.has['fetchDeposits']:
withdrawals = exchange.fetch_withdrawals()
set_data = []
for withdraw in withdrawals:
date_time = int(withdraw['timestamp']) / 1000
date_time = datetime.datetime.fromtimestamp(date_time)
set_data.append([user['id'], withdraw['currency'], withdraw[
'txid'], withdraw['address'], withdraw['type'], withdraw[
'amount'], withdraw['status'], withdraw['fee']['cost'],
date_time])
sqlguncelleme = (
'INSERT INTO transfers (user_id, currency, txid, address, type, amount, status, fee, datetime) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s) ON DUPLICATE KEY UPDATE user_id=(user_id)'
)
cursor.executemany(sqlguncelleme, set_data)
db.commit()
withdrawals = exchange.fetch_deposits()
set_data = []
for withdraw in withdrawals:
date_time = int(withdraw['timestamp']) / 1000
date_time = datetime.datetime.fromtimestamp(date_time)
set_data.append([user['id'], withdraw['currency'], withdraw[
'txid'], withdraw['address'], withdraw['type'], withdraw[
'amount'], withdraw['status'], '0', date_time])
sqlguncelleme = (
'INSERT INTO transfers (user_id, currency, txid, address, type, amount, status, fee, datetime) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s) ON DUPLICATE KEY UPDATE user_id=(user_id)'
)
cursor.executemany(sqlguncelleme, set_data)
db.commit()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
db = mysql_baglan('bingo')
cursor = db.cursor()
cursor.execute('SET NAMES utf8;')
cursor.execute('SET CHARACTER SET utf8;')
cursor.execute('SET character_set_connection=utf8;')
sql = (
"SELECT apikey,secret,id FROM `users` WHERE status = '1' order by id desc")
cursor.execute(sql)
results = cursor.fetchall()
column_names = ['apikey', 'secret', 'id']
for row in results:
user = dict(zip(column_names, row))
print(user['id'])
exchange = ccxt.binance({'apiKey': user['apikey'], 'secret': user[
'secret'], 'enableRateLimit': True})
if exchange.has['fetchDeposits']:
withdrawals = exchange.fetch_withdrawals()
set_data = []
for withdraw in withdrawals:
date_time = int(withdraw['timestamp']) / 1000
date_time = datetime.datetime.fromtimestamp(date_time)
set_data.append([user['id'], withdraw['currency'], withdraw[
'txid'], withdraw['address'], withdraw['type'], withdraw[
'amount'], withdraw['status'], withdraw['fee']['cost'],
date_time])
sqlguncelleme = (
'INSERT INTO transfers (user_id, currency, txid, address, type, amount, status, fee, datetime) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s) ON DUPLICATE KEY UPDATE user_id=(user_id)'
)
cursor.executemany(sqlguncelleme, set_data)
db.commit()
withdrawals = exchange.fetch_deposits()
set_data = []
for withdraw in withdrawals:
date_time = int(withdraw['timestamp']) / 1000
date_time = datetime.datetime.fromtimestamp(date_time)
set_data.append([user['id'], withdraw['currency'], withdraw[
'txid'], withdraw['address'], withdraw['type'], withdraw[
'amount'], withdraw['status'], '0', date_time])
sqlguncelleme = (
'INSERT INTO transfers (user_id, currency, txid, address, type, amount, status, fee, datetime) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s) ON DUPLICATE KEY UPDATE user_id=(user_id)'
)
cursor.executemany(sqlguncelleme, set_data)
db.commit()
<|reserved_special_token_1|>
import ccxt
import json
import time
from baglanti import mysql_baglan
import datetime
import requests
from urllib.parse import urljoin
import sys
db = mysql_baglan('bingo')
cursor = db.cursor()
cursor.execute('SET NAMES utf8;')
cursor.execute('SET CHARACTER SET utf8;')
cursor.execute('SET character_set_connection=utf8;')
sql = (
"SELECT apikey,secret,id FROM `users` WHERE status = '1' order by id desc")
cursor.execute(sql)
results = cursor.fetchall()
column_names = ['apikey', 'secret', 'id']
for row in results:
user = dict(zip(column_names, row))
print(user['id'])
exchange = ccxt.binance({'apiKey': user['apikey'], 'secret': user[
'secret'], 'enableRateLimit': True})
if exchange.has['fetchDeposits']:
withdrawals = exchange.fetch_withdrawals()
set_data = []
for withdraw in withdrawals:
date_time = int(withdraw['timestamp']) / 1000
date_time = datetime.datetime.fromtimestamp(date_time)
set_data.append([user['id'], withdraw['currency'], withdraw[
'txid'], withdraw['address'], withdraw['type'], withdraw[
'amount'], withdraw['status'], withdraw['fee']['cost'],
date_time])
sqlguncelleme = (
'INSERT INTO transfers (user_id, currency, txid, address, type, amount, status, fee, datetime) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s) ON DUPLICATE KEY UPDATE user_id=(user_id)'
)
cursor.executemany(sqlguncelleme, set_data)
db.commit()
withdrawals = exchange.fetch_deposits()
set_data = []
for withdraw in withdrawals:
date_time = int(withdraw['timestamp']) / 1000
date_time = datetime.datetime.fromtimestamp(date_time)
set_data.append([user['id'], withdraw['currency'], withdraw[
'txid'], withdraw['address'], withdraw['type'], withdraw[
'amount'], withdraw['status'], '0', date_time])
sqlguncelleme = (
'INSERT INTO transfers (user_id, currency, txid, address, type, amount, status, fee, datetime) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s) ON DUPLICATE KEY UPDATE user_id=(user_id)'
)
cursor.executemany(sqlguncelleme, set_data)
db.commit()
<|reserved_special_token_1|>
import ccxt
import json
import time
from baglanti import mysql_baglan
import datetime
import requests
from urllib.parse import urljoin
import sys
db = mysql_baglan("bingo")
cursor = db.cursor()
cursor.execute('SET NAMES utf8;')
cursor.execute('SET CHARACTER SET utf8;')
cursor.execute('SET character_set_connection=utf8;')
sql = "SELECT apikey,secret,id FROM `users` WHERE status = '1' order by id desc"
cursor.execute(sql)
results = cursor.fetchall()
column_names = ['apikey', 'secret', 'id']
for row in results:
user = dict(zip(column_names, row))
print(user['id'])
exchange = ccxt.binance({
'apiKey': user['apikey'],
'secret': user['secret'],
'enableRateLimit': True
})
    # Sync this user's withdrawal and deposit history into the transfers table
if exchange.has['fetchDeposits']:
withdrawals = exchange.fetch_withdrawals()
set_data = []
for withdraw in withdrawals:
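            # ccxt reports timestamps in milliseconds since the epoch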
date_time = int(withdraw['timestamp'])/1000
date_time = datetime.datetime.fromtimestamp(date_time)
set_data.append([user['id'], withdraw['currency'], withdraw['txid'], withdraw['address'], withdraw['type'], withdraw['amount'], withdraw['status'], withdraw['fee']['cost'],date_time])
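        # ON DUPLICATE KEY UPDATE makes the insert a no-op when a row with the
        # same unique key already exists, so re-running the sync is safe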
sqlguncelleme = "INSERT INTO transfers (user_id, currency, txid, address, type, amount, status, fee, datetime) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s) ON DUPLICATE KEY UPDATE user_id=(user_id)"
cursor.executemany(sqlguncelleme, set_data,)
db.commit()
withdrawals = exchange.fetch_deposits()
set_data = []
for withdraw in withdrawals:
date_time = int(withdraw['timestamp'])/1000
date_time = datetime.datetime.fromtimestamp(date_time)
set_data.append([user['id'], withdraw['currency'], withdraw['txid'], withdraw['address'], withdraw['type'], withdraw['amount'], withdraw['status'], '0',date_time])
sqlguncelleme = "INSERT INTO transfers (user_id, currency, txid, address, type, amount, status, fee, datetime) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s) ON DUPLICATE KEY UPDATE user_id=(user_id)"
cursor.executemany(sqlguncelleme, set_data,)
db.commit()
|
flexible
|
{
"blob_id": "1d29ce58ca626155d626216fbbd70d7b241efa25",
"index": 6363,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ncursor.execute('SET NAMES utf8;')\ncursor.execute('SET CHARACTER SET utf8;')\ncursor.execute('SET character_set_connection=utf8;')\n<mask token>\ncursor.execute(sql)\n<mask token>\nfor row in results:\n user = dict(zip(column_names, row))\n print(user['id'])\n exchange = ccxt.binance({'apiKey': user['apikey'], 'secret': user[\n 'secret'], 'enableRateLimit': True})\n if exchange.has['fetchDeposits']:\n withdrawals = exchange.fetch_withdrawals()\n set_data = []\n for withdraw in withdrawals:\n date_time = int(withdraw['timestamp']) / 1000\n date_time = datetime.datetime.fromtimestamp(date_time)\n set_data.append([user['id'], withdraw['currency'], withdraw[\n 'txid'], withdraw['address'], withdraw['type'], withdraw[\n 'amount'], withdraw['status'], withdraw['fee']['cost'],\n date_time])\n sqlguncelleme = (\n 'INSERT INTO transfers (user_id, currency, txid, address, type, amount, status, fee, datetime) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s) ON DUPLICATE KEY UPDATE user_id=(user_id)'\n )\n cursor.executemany(sqlguncelleme, set_data)\n db.commit()\n withdrawals = exchange.fetch_deposits()\n set_data = []\n for withdraw in withdrawals:\n date_time = int(withdraw['timestamp']) / 1000\n date_time = datetime.datetime.fromtimestamp(date_time)\n set_data.append([user['id'], withdraw['currency'], withdraw[\n 'txid'], withdraw['address'], withdraw['type'], withdraw[\n 'amount'], withdraw['status'], '0', date_time])\n sqlguncelleme = (\n 'INSERT INTO transfers (user_id, currency, txid, address, type, amount, status, fee, datetime) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s) ON DUPLICATE KEY UPDATE user_id=(user_id)'\n )\n cursor.executemany(sqlguncelleme, set_data)\n db.commit()\n",
"step-3": "<mask token>\ndb = mysql_baglan('bingo')\ncursor = db.cursor()\ncursor.execute('SET NAMES utf8;')\ncursor.execute('SET CHARACTER SET utf8;')\ncursor.execute('SET character_set_connection=utf8;')\nsql = (\n \"SELECT apikey,secret,id FROM `users` WHERE status = '1' order by id desc\")\ncursor.execute(sql)\nresults = cursor.fetchall()\ncolumn_names = ['apikey', 'secret', 'id']\nfor row in results:\n user = dict(zip(column_names, row))\n print(user['id'])\n exchange = ccxt.binance({'apiKey': user['apikey'], 'secret': user[\n 'secret'], 'enableRateLimit': True})\n if exchange.has['fetchDeposits']:\n withdrawals = exchange.fetch_withdrawals()\n set_data = []\n for withdraw in withdrawals:\n date_time = int(withdraw['timestamp']) / 1000\n date_time = datetime.datetime.fromtimestamp(date_time)\n set_data.append([user['id'], withdraw['currency'], withdraw[\n 'txid'], withdraw['address'], withdraw['type'], withdraw[\n 'amount'], withdraw['status'], withdraw['fee']['cost'],\n date_time])\n sqlguncelleme = (\n 'INSERT INTO transfers (user_id, currency, txid, address, type, amount, status, fee, datetime) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s) ON DUPLICATE KEY UPDATE user_id=(user_id)'\n )\n cursor.executemany(sqlguncelleme, set_data)\n db.commit()\n withdrawals = exchange.fetch_deposits()\n set_data = []\n for withdraw in withdrawals:\n date_time = int(withdraw['timestamp']) / 1000\n date_time = datetime.datetime.fromtimestamp(date_time)\n set_data.append([user['id'], withdraw['currency'], withdraw[\n 'txid'], withdraw['address'], withdraw['type'], withdraw[\n 'amount'], withdraw['status'], '0', date_time])\n sqlguncelleme = (\n 'INSERT INTO transfers (user_id, currency, txid, address, type, amount, status, fee, datetime) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s) ON DUPLICATE KEY UPDATE user_id=(user_id)'\n )\n cursor.executemany(sqlguncelleme, set_data)\n db.commit()\n",
"step-4": "import ccxt\nimport json\nimport time\nfrom baglanti import mysql_baglan\nimport datetime\nimport requests\nfrom urllib.parse import urljoin\nimport sys\ndb = mysql_baglan('bingo')\ncursor = db.cursor()\ncursor.execute('SET NAMES utf8;')\ncursor.execute('SET CHARACTER SET utf8;')\ncursor.execute('SET character_set_connection=utf8;')\nsql = (\n \"SELECT apikey,secret,id FROM `users` WHERE status = '1' order by id desc\")\ncursor.execute(sql)\nresults = cursor.fetchall()\ncolumn_names = ['apikey', 'secret', 'id']\nfor row in results:\n user = dict(zip(column_names, row))\n print(user['id'])\n exchange = ccxt.binance({'apiKey': user['apikey'], 'secret': user[\n 'secret'], 'enableRateLimit': True})\n if exchange.has['fetchDeposits']:\n withdrawals = exchange.fetch_withdrawals()\n set_data = []\n for withdraw in withdrawals:\n date_time = int(withdraw['timestamp']) / 1000\n date_time = datetime.datetime.fromtimestamp(date_time)\n set_data.append([user['id'], withdraw['currency'], withdraw[\n 'txid'], withdraw['address'], withdraw['type'], withdraw[\n 'amount'], withdraw['status'], withdraw['fee']['cost'],\n date_time])\n sqlguncelleme = (\n 'INSERT INTO transfers (user_id, currency, txid, address, type, amount, status, fee, datetime) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s) ON DUPLICATE KEY UPDATE user_id=(user_id)'\n )\n cursor.executemany(sqlguncelleme, set_data)\n db.commit()\n withdrawals = exchange.fetch_deposits()\n set_data = []\n for withdraw in withdrawals:\n date_time = int(withdraw['timestamp']) / 1000\n date_time = datetime.datetime.fromtimestamp(date_time)\n set_data.append([user['id'], withdraw['currency'], withdraw[\n 'txid'], withdraw['address'], withdraw['type'], withdraw[\n 'amount'], withdraw['status'], '0', date_time])\n sqlguncelleme = (\n 'INSERT INTO transfers (user_id, currency, txid, address, type, amount, status, fee, datetime) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s) ON DUPLICATE KEY UPDATE user_id=(user_id)'\n )\n cursor.executemany(sqlguncelleme, set_data)\n db.commit()\n",
"step-5": "import ccxt\r\nimport json\r\nimport time\r\nfrom baglanti import mysql_baglan\r\nimport datetime\r\nimport requests\r\nfrom urllib.parse import urljoin\r\nimport sys\r\n\r\ndb = mysql_baglan(\"bingo\")\r\ncursor = db.cursor()\r\ncursor.execute('SET NAMES utf8;')\r\ncursor.execute('SET CHARACTER SET utf8;')\r\ncursor.execute('SET character_set_connection=utf8;')\r\n\r\nsql = \"SELECT apikey,secret,id FROM `users` WHERE status = '1' order by id desc\"\r\ncursor.execute(sql)\r\nresults = cursor.fetchall()\r\ncolumn_names = ['apikey', 'secret', 'id']\r\nfor row in results:\r\n user = dict(zip(column_names, row))\r\n print(user['id'])\r\n exchange = ccxt.binance({\r\n 'apiKey': user['apikey'],\r\n 'secret': user['secret'],\r\n 'enableRateLimit': True\r\n })\r\n\r\n #BTC\r\n if exchange.has['fetchDeposits']:\r\n withdrawals = exchange.fetch_withdrawals()\r\n set_data = []\r\n for withdraw in withdrawals:\r\n date_time = int(withdraw['timestamp'])/1000\r\n date_time = datetime.datetime.fromtimestamp(date_time)\r\n \r\n set_data.append([user['id'], withdraw['currency'], withdraw['txid'], withdraw['address'], withdraw['type'], withdraw['amount'], withdraw['status'], withdraw['fee']['cost'],date_time])\r\n sqlguncelleme = \"INSERT INTO transfers (user_id, currency, txid, address, type, amount, status, fee, datetime) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s) ON DUPLICATE KEY UPDATE user_id=(user_id)\"\r\n cursor.executemany(sqlguncelleme, set_data,)\r\n db.commit()\r\n\r\n withdrawals = exchange.fetch_deposits()\r\n set_data = []\r\n for withdraw in withdrawals:\r\n date_time = int(withdraw['timestamp'])/1000\r\n date_time = datetime.datetime.fromtimestamp(date_time)\r\n \r\n set_data.append([user['id'], withdraw['currency'], withdraw['txid'], withdraw['address'], withdraw['type'], withdraw['amount'], withdraw['status'], '0',date_time])\r\n sqlguncelleme = \"INSERT INTO transfers (user_id, currency, txid, address, type, amount, status, fee, datetime) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s) ON DUPLICATE KEY UPDATE user_id=(user_id)\"\r\n cursor.executemany(sqlguncelleme, set_data,)\r\n db.commit()\r\n\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |