hexsha stringlengths 40 40 | size int64 4 1.02M | ext stringclasses 8
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 209 | max_stars_repo_name stringlengths 5 121 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 209 | max_issues_repo_name stringlengths 5 121 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 209 | max_forks_repo_name stringlengths 5 121 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 1.02M | avg_line_length float64 1.07 66.1k | max_line_length int64 4 266k | alphanum_fraction float64 0.01 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3806d22501ee229217c352fc328cf37acd3d175c | 105,214 | py | Python | projects/src/main/python/CodeJam/Y14R5P1/liutianren/generated_py_26641b3d193b4327b8b56443c323055b.py | DynamicCodeSearch/CodeSeer | ee985ece7691691585952eb88565f0e08bdc9113 | [
"MIT"
] | 5 | 2020-04-05T18:04:13.000Z | 2021-04-13T20:34:19.000Z | projects/src/main/python/CodeJam/Y14R5P1/liutianren/generated_py_26641b3d193b4327b8b56443c323055b.py | DynamicCodeSearch/CodeSeer | ee985ece7691691585952eb88565f0e08bdc9113 | [
"MIT"
] | 1 | 2020-04-29T21:42:26.000Z | 2020-05-01T23:45:45.000Z | projects/src/main/python/CodeJam/Y14R5P1/liutianren/generated_py_26641b3d193b4327b8b56443c323055b.py | DynamicCodeSearch/CodeSeer | ee985ece7691691585952eb88565f0e08bdc9113 | [
"MIT"
] | 3 | 2020-01-27T16:02:14.000Z | 2021-02-08T13:25:15.000Z | import sys
sys.path.append('/home/george2/Raise/ProgramRepair/CodeSeer/projects/src/main/python')
from CodeJam.Y14R5P1.liutianren.pa import *
def func_b7ac5fe003534a369b5aec00bcdd371b(A, q, p, r, i, s):
    """Compute the i-th pseudo-random term s + (i*p + q) % r, record it in A,
    and return it."""
    term = s + (i * p + q) % r
    A.append(term)
    return term
def func_4f647da57d784845a69b2374612069c0(a, A):
    """Append ``a`` to ``A`` and return the running sum.

    Bug fix: ``psum`` was read before assignment (UnboundLocalError); it is
    now initialised to 0, as the fully-initialised sibling fragments do, so
    the returned running sum equals ``a``.
    """
    A.append(a)
    psum = 0
    psum = psum + a
    return psum
def func_805366c712b34cf3a55833bd94fec175(a, S):
    """Fold ``a`` into a running sum, append the sum to ``S``, and return it.

    Bug fix: ``psum`` was read before assignment (UnboundLocalError); it is
    now initialised to 0 as in the sibling fragments.
    """
    psum = 0
    psum = psum + a
    S.append(psum)
    return psum
def func_0fe8cbfa557948b39902d154be091bcf(A, q, p, r, i, s):
    """Compute a_i = s + (i*p + q) % r, record it in ``A``, and return it.

    Bug fix: ``psum`` was read before assignment (UnboundLocalError); it is
    now initialised to 0 so the (otherwise unused) running sum is well-defined.
    """
    a = s + (i * p + q) % r
    A.append(a)
    psum = 0
    psum = psum + a
    return a
def func_7795ba23206e4560bafd9f913baf3c8a(A, q, p, r, i, s):
    """Compute a_i = s + (i*p + q) % r, record it in ``A``, and return the
    running sum (equal to a_i here).

    Bug fix: ``psum`` was read before assignment (UnboundLocalError); it is
    now initialised to 0 as in the sibling fragments.
    """
    a = s + (i * p + q) % r
    A.append(a)
    psum = 0
    psum = psum + a
    return psum
def func_32c92e553af945d3a2fbfd4259b5c95b(a, S, A):
    """Append ``a`` to ``A``, append the running sum to ``S``, return the sum.

    Bug fix: ``psum`` was read before assignment (UnboundLocalError); it is
    now initialised to 0, so the sum appended and returned equals ``a``.
    """
    A.append(a)
    psum = 0
    psum = psum + a
    S.append(psum)
    return psum
def func_731daa6475a7445f9c6f5e8c3133e4d4(S, A, q, p, r, i, s):
    """Compute a_i = s + (i*p + q) % r, record it in ``A`` and its running
    sum in ``S``, and return a_i.

    Bug fix: ``psum`` was read before assignment (UnboundLocalError); it is
    now initialised to 0 as in the sibling fragments.
    """
    a = s + (i * p + q) % r
    A.append(a)
    psum = 0
    psum = psum + a
    S.append(psum)
    return a
def func_aaab17d8f8834d589b74673642ef8123(S, A, q, p, r, i, s):
    """Compute a_i = s + (i*p + q) % r, record it in ``A`` and its running
    sum in ``S``, and return the running sum.

    Bug fix: ``psum`` was read before assignment (UnboundLocalError); it is
    now initialised to 0 as in the sibling fragments.
    """
    a = s + (i * p + q) % r
    A.append(a)
    psum = 0
    psum = psum + a
    S.append(psum)
    return psum
def func_9c5c57414433474fb461d57fbba6e671(S, i, j, psum):
    """Return the larger of the first two parts when the prefix S[:i+1] is
    split at index j (prefix sums in ``S``)."""
    left = S[j]
    mid = S[i] - S[j]
    pgain = max(left, mid)
    gain = max(pgain, psum - S[i])  # computed for parity with the original
    return pgain
def func_5abf7ed201f04c6f9ab63dfd863a3624(S, i, j, psum):
    """Return the largest of the three parts produced by cutting the total
    ``psum`` at prefix-sum positions j and i."""
    left = S[j]
    mid = S[i] - S[j]
    pgain = max(left, mid)
    gain = max(pgain, psum - S[i])
    return gain
def func_cc55eea84363492abd4d457ef335dfe4(S, i, j, psum):
    """Compute the three-way-split gain at (j, i) and fold it into mingain;
    return the gain.

    Bug fix: ``mingain`` was read before assignment (UnboundLocalError); it
    is now seeded with ``psum``, the trivial upper bound used by the
    fully-initialised sibling fragments.
    """
    mingain = psum
    gain = max(S[j], S[i] - S[j], psum - S[i])
    mingain = min(mingain, gain)
    return gain
def func_5583bc2bfbdb436db8006cebb69b11c4(S, i, j, psum):
    """Compute the three-way-split gain at (j, i) and return the updated
    minimum gain.

    Bug fix: ``mingain`` was read before assignment (UnboundLocalError); it
    is now seeded with ``psum`` as in the sibling fragments, so the result
    is min(psum, gain).
    """
    mingain = psum
    gain = max(S[j], S[i] - S[j], psum - S[i])
    mingain = min(mingain, gain)
    return mingain
def func_0a4392010bd24f578a84b2444754ce88(pgain, minpgain):
    """Advance a split pointer when ``pgain`` does not beat ``minpgain``.

    NOTE(review): ``j`` is read before any assignment in this scope, so any
    call raises UnboundLocalError. This is a fragment cut out of a loop
    where ``j`` was the loop counter; there is no grounded value to seed it
    with here — confirm intent before using this function.
    """
    if pgain <= minpgain:
        hpi = j  # NOTE(review): local `hpi` is assigned but never used
    j = j + 1
    return j
def func_506a234433784793b52c21382bdec6ac(pgain, minpgain):
    """Record the split pointer as ``hpi`` when ``pgain`` <= ``minpgain``.

    NOTE(review): ``j`` is read before any assignment, so any call raises
    UnboundLocalError; ``hpi`` is also unbound on the return path when the
    condition is false. This is a fragment cut out of a loop — confirm
    intent before using this function.
    """
    if pgain <= minpgain:
        hpi = j
    j = j + 1
    return hpi
def func_347a606abae24f739b8a7473ba73b1a7(S, i, j, psum):
    """Compute the pairwise and three-way split gains at (j, i); return the
    pairwise gain.

    Bug fix: ``mingain`` was read before assignment (UnboundLocalError); it
    is now seeded with ``psum`` as in the sibling fragments.
    """
    mingain = psum
    pgain = max(S[j], S[i] - S[j])
    gain = max(S[j], S[i] - S[j], psum - S[i])
    mingain = min(mingain, gain)
    return pgain
def func_6cffab610bd843b7b0494046bcfac49b(S, i, j, psum):
    """Compute the pairwise and three-way split gains at (j, i); return the
    three-way gain.

    Bug fix: ``mingain`` was read before assignment (UnboundLocalError); it
    is now seeded with ``psum`` as in the sibling fragments.
    """
    mingain = psum
    pgain = max(S[j], S[i] - S[j])
    gain = max(S[j], S[i] - S[j], psum - S[i])
    mingain = min(mingain, gain)
    return gain
def func_7189b258ca134403aa7d3bf71740e5eb(S, i, j, psum):
    """Compute the split gains at (j, i) and return the updated minimum gain.

    Bug fix: ``mingain`` was read before assignment (UnboundLocalError); it
    is now seeded with ``psum``, so the result is min(psum, gain).
    """
    mingain = psum
    pgain = max(S[j], S[i] - S[j])
    gain = max(S[j], S[i] - S[j], psum - S[i])
    mingain = min(mingain, gain)
    return mingain
def func_e2e18e56ee1644a68e352ce4c48a225a(S, i, minpgain, psum):
    """Scan split points j in [hpi, i] over prefix sums ``S`` and return the
    minimal worst-part gain seen.

    Bug fix: ``hpi`` and ``mingain`` were read before assignment
    (UnboundLocalError); they are now seeded with 0 and ``psum``, matching
    the fully-initialised sibling fragments in this file.
    """
    hpi = 0          # leftmost still-promising split index
    mingain = psum   # trivial upper bound on the best gain
    j = hpi
    while j <= i:
        pgain = max(S[j], S[i] - S[j])
        gain = max(S[j], S[i] - S[j], psum - S[i])
        mingain = min(mingain, gain)
        if 2 * S[j] > S[i]:
            break
        if pgain <= minpgain:
            hpi = j
        j = j + 1
    return mingain
def func_3b6d7b1706644a4e8cd02b41d3501401(S, i, minpgain, psum):
    """Scan split points j in [hpi, i] over prefix sums ``S`` and return the
    last pairwise gain computed.

    Bug fix: ``hpi`` and ``mingain`` were read before assignment
    (UnboundLocalError); seeded with 0 and ``psum`` as in the siblings.
    """
    hpi = 0
    mingain = psum
    j = hpi
    while j <= i:
        pgain = max(S[j], S[i] - S[j])
        gain = max(S[j], S[i] - S[j], psum - S[i])
        mingain = min(mingain, gain)
        if 2 * S[j] > S[i]:
            break
        if pgain <= minpgain:
            hpi = j
        j = j + 1
    return pgain
def func_6a6100622f5e46bba745b1e199d314a2(S, i, minpgain, psum):
    """Scan split points j in [hpi, i] over prefix sums ``S`` and return the
    last recorded promising split index ``hpi``.

    Bug fix: ``hpi`` and ``mingain`` were read before assignment
    (UnboundLocalError); seeded with 0 and ``psum`` as in the siblings.
    """
    hpi = 0
    mingain = psum
    j = hpi
    while j <= i:
        pgain = max(S[j], S[i] - S[j])
        gain = max(S[j], S[i] - S[j], psum - S[i])
        mingain = min(mingain, gain)
        if 2 * S[j] > S[i]:
            break
        if pgain <= minpgain:
            hpi = j
        j = j + 1
    return hpi
def func_41cc33d58cce46d7971c77c39b92db74(S, i, minpgain, psum):
    """Scan split points j in [hpi, i] over prefix sums ``S`` and return the
    final value of the scan pointer ``j``.

    Bug fix: ``hpi`` and ``mingain`` were read before assignment
    (UnboundLocalError); seeded with 0 and ``psum`` as in the siblings.
    """
    hpi = 0
    mingain = psum
    j = hpi
    while j <= i:
        pgain = max(S[j], S[i] - S[j])
        gain = max(S[j], S[i] - S[j], psum - S[i])
        mingain = min(mingain, gain)
        if 2 * S[j] > S[i]:
            break
        if pgain <= minpgain:
            hpi = j
        j = j + 1
    return j
def func_080f0976862642d28c2044a7e5976039(S, i, minpgain, psum):
    """Scan split points j in [hpi, i] over prefix sums ``S`` and return the
    last three-way gain computed.

    Bug fix: ``hpi`` and ``mingain`` were read before assignment
    (UnboundLocalError); seeded with 0 and ``psum`` as in the siblings.
    """
    hpi = 0
    mingain = psum
    j = hpi
    while j <= i:
        pgain = max(S[j], S[i] - S[j])
        gain = max(S[j], S[i] - S[j], psum - S[i])
        mingain = min(mingain, gain)
        if 2 * S[j] > S[i]:
            break
        if pgain <= minpgain:
            hpi = j
        j = j + 1
    return gain
def func_fc05b86451c349c7ae796b718afaff94():
    """Allocate fresh value/prefix-sum accumulators; return the prefix list."""
    A, S = [], []
    return S
def func_4b5407d3d9044c3896063344877f24b4():
    """Allocate fresh value/prefix-sum accumulators; return the value list."""
    A, S = [], []
    return A
def func_f6857b2da9c04116b182776d5013a697():
    """Start a new prefix-sum accumulation; return the (empty) sum list."""
    S, psum = [], 0
    return S
def func_2a11215d766d419e9661804ade01216d():
    """Start a new prefix-sum accumulation; return the zeroed running sum."""
    S, psum = [], 0
    return psum
def func_f7ecc06fdbc64a7592ff48fbf217c363(N, S, A, q, p, r, s):
    """Fill ``A`` with a_i = s + (i*p + q) % r for i in [0, N) and ``S`` with
    the prefix sums; return the final loop index (raises for N == 0, as the
    original did)."""
    psum = 0
    for i in range(N):
        val = s + (i * p + q) % r
        A.append(val)
        psum += val
        S.append(psum)
    return i
def func_719669b4b6bd47ca87a107ad08579a7b(N, S, A, q, p, r, s):
    """Fill ``A`` with a_i = s + (i*p + q) % r for i in [0, N) and ``S`` with
    the prefix sums; return the total sum."""
    psum = 0
    for idx in range(N):
        val = s + (idx * p + q) % r
        A.append(val)
        psum += val
        S.append(psum)
    return psum
def func_a497461c9b7c4268bd5ff98afa1a682d(N, S, A, q, p, r, s):
    """Fill ``A`` with a_i = s + (i*p + q) % r for i in [0, N) and ``S`` with
    the prefix sums; return the last generated value (raises for N == 0, as
    the original did)."""
    psum = 0
    for idx in range(N):
        val = s + (idx * p + q) % r
        A.append(val)
        psum += val
        S.append(psum)
    return val
def func_749938a23d494c0f860ed0f702a99efa(N, S, A, q, p, r, s):
    """Fill ``A`` with a_i = s + (i*p + q) % r and ``S`` with its prefix
    sums; return the last generated value.

    Bug fix: ``psum`` was read before assignment (UnboundLocalError); it is
    now initialised to 0 as in the sibling fragments. The dead
    commented-out block was removed.
    """
    psum = 0
    for i in range(N):
        a = s + (i * p + q) % r
        A.append(a)
        psum = psum + a
        S.append(psum)
    return a
def func_f26a3f234ba84f8d82c074dea317ffed(N, S, A, q, p, r, s):
    """Fill ``A`` with a_i = s + (i*p + q) % r and ``S`` with its prefix
    sums; return the final loop index.

    Bug fix: ``psum`` was read before assignment (UnboundLocalError); it is
    now initialised to 0 as in the sibling fragments. The dead
    commented-out block was removed.
    """
    psum = 0
    for i in range(N):
        a = s + (i * p + q) % r
        A.append(a)
        psum = psum + a
        S.append(psum)
    return i
def func_4e075b6e59cc415daca8eb5eb2bc2837(N, S, A, q, p, r, s):
    """Fill ``A`` with a_i = s + (i*p + q) % r and ``S`` with its prefix
    sums; return the total sum.

    Bug fix: ``psum`` was read before assignment (UnboundLocalError); it is
    now initialised to 0 as in the sibling fragments. The dead
    commented-out block was removed.
    """
    psum = 0
    for i in range(N):
        a = s + (i * p + q) % r
        A.append(a)
        psum = psum + a
        S.append(psum)
    return psum
def func_1cea03d2442c41798726dd74d8d9a32f(psum):
    """Initialise the running minimum gain to the full total ``psum``."""
    mingain = psum
    return mingain
def func_0790d9e81a734e8d9aa03f63607f8e2b(psum):
    """Seed both gain trackers at the full total; return ``mingain``."""
    mingain = minpgain = psum
    return mingain
def func_c73315b5aa644f0eb7454e6a004a25b7(psum):
    """Seed both gain trackers at the full total; return ``minpgain``."""
    mingain = minpgain = psum
    return minpgain
def func_efa98545c3854f92960d5c1a6fdbba81(psum):
    """Reset the split pointer to 0; ``minpgain`` starts at the full total."""
    hpi = 0
    minpgain = psum
    return hpi
def func_8024920ec57044969cc1e0f764d7dd39(psum):
    """Reset the split pointer and return the seeded pairwise-gain bound."""
    hpi = 0
    minpgain = psum
    return minpgain
def func_e6d029c2fb5a421b8bc02f71d9910742(N, S, minpgain, psum):
    """Sweep all ends i and split points j over prefix sums ``S``; return
    the minimal worst-part gain found.

    Bug fix: ``mingain`` was read before assignment (UnboundLocalError); it
    is now seeded with ``psum`` as in the fully-initialised siblings.
    """
    hpi = 0          # leftmost still-promising split index
    mingain = psum   # trivial upper bound on the best gain
    for i in range(N):
        j = hpi
        while j <= i:
            pgain = max(S[j], S[i] - S[j])
            gain = max(S[j], S[i] - S[j], psum - S[i])
            mingain = min(mingain, gain)
            if 2 * S[j] > S[i]:
                break
            if pgain <= minpgain:
                hpi = j
            j = j + 1
    return mingain
def func_42184e20e80e435199203dd2d8bafbbb(N, S, minpgain, psum):
    """Sweep all ends i and split points j over prefix sums ``S``; return
    the last three-way gain computed.

    Bug fix: ``mingain`` was read before assignment (UnboundLocalError); it
    is now seeded with ``psum`` as in the fully-initialised siblings.
    """
    hpi = 0
    mingain = psum
    for i in range(N):
        j = hpi
        while j <= i:
            pgain = max(S[j], S[i] - S[j])
            gain = max(S[j], S[i] - S[j], psum - S[i])
            mingain = min(mingain, gain)
            if 2 * S[j] > S[i]:
                break
            if pgain <= minpgain:
                hpi = j
            j = j + 1
    return gain
def func_2ddde746d5d647029870f1e7d940f646(N, S, minpgain, psum):
    """Sweep all ends i and split points j over prefix sums ``S``; return
    the last recorded promising split index ``hpi``.

    Bug fix: ``mingain`` was read before assignment (UnboundLocalError); it
    is now seeded with ``psum`` as in the fully-initialised siblings.
    """
    hpi = 0
    mingain = psum
    for i in range(N):
        j = hpi
        while j <= i:
            pgain = max(S[j], S[i] - S[j])
            gain = max(S[j], S[i] - S[j], psum - S[i])
            mingain = min(mingain, gain)
            if 2 * S[j] > S[i]:
                break
            if pgain <= minpgain:
                hpi = j
            j = j + 1
    return hpi
def func_da72f6bed5904a43927491e518026c74(N, S, minpgain, psum):
    """Sweep all ends i and split points j over prefix sums ``S``; return
    the last pairwise gain computed.

    Bug fix: ``mingain`` was read before assignment (UnboundLocalError); it
    is now seeded with ``psum`` as in the fully-initialised siblings.
    """
    hpi = 0
    mingain = psum
    for i in range(N):
        j = hpi
        while j <= i:
            pgain = max(S[j], S[i] - S[j])
            gain = max(S[j], S[i] - S[j], psum - S[i])
            mingain = min(mingain, gain)
            if 2 * S[j] > S[i]:
                break
            if pgain <= minpgain:
                hpi = j
            j = j + 1
    return pgain
def func_01d58ea91fb649898ea070248347a561(N, S, minpgain, psum):
    """Sweep all ends i and split points j over prefix sums ``S``; return
    the final value of the scan pointer ``j``.

    Bug fix: ``mingain`` was read before assignment (UnboundLocalError); it
    is now seeded with ``psum`` as in the fully-initialised siblings.
    """
    hpi = 0
    mingain = psum
    for i in range(N):
        j = hpi
        while j <= i:
            pgain = max(S[j], S[i] - S[j])
            gain = max(S[j], S[i] - S[j], psum - S[i])
            mingain = min(mingain, gain)
            if 2 * S[j] > S[i]:
                break
            if pgain <= minpgain:
                hpi = j
            j = j + 1
    return j
def func_60a9cbb9e203427e8fbf6d2e02204fea(N, S, minpgain, psum):
    """Sweep all ends i and split points j over prefix sums ``S``; return
    the final end index ``i``.

    Bug fix: ``mingain`` was read before assignment (UnboundLocalError); it
    is now seeded with ``psum`` as in the fully-initialised siblings.
    """
    hpi = 0
    mingain = psum
    for i in range(N):
        j = hpi
        while j <= i:
            pgain = max(S[j], S[i] - S[j])
            gain = max(S[j], S[i] - S[j], psum - S[i])
            mingain = min(mingain, gain)
            if 2 * S[j] > S[i]:
                break
            if pgain <= minpgain:
                hpi = j
            j = j + 1
    return i
def func_43c19d6aafa24da589108f895b791812(N, S, minpgain, psum):
    """Sweep all ends i and split points j over prefix sums ``S``; return
    the best attainable fraction (psum - mingain) / psum.

    Bug fix: ``hpi`` and ``mingain`` were read before assignment
    (UnboundLocalError); seeded with 0 and ``psum`` as in the siblings.
    """
    hpi = 0
    mingain = psum
    for i in range(N):
        j = hpi
        while j <= i:
            pgain = max(S[j], S[i] - S[j])
            gain = max(S[j], S[i] - S[j], psum - S[i])
            mingain = min(mingain, gain)
            if 2 * S[j] > S[i]:
                break
            if pgain <= minpgain:
                hpi = j
            j = j + 1
    return float(psum - mingain) / psum
def func_a66c2b6a5ae544a9a10c5717d5147b44():
A = []
S = []
psum = 0
return S
def func_d711fb2c4bb5408f964a716fd2760123():
A = []
S = []
psum = 0
return A
def func_bab063a390034a3e907260a91ddb0ba0():
A = []
S = []
psum = 0
return psum
def func_89520863509241c6938cc799077e71ed(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
return S
def func_2eafe3a7702d46ee88136df8edf45be5(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
return psum
def func_a233020961ec4e3eae425a094fe17764(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
return a
def func_ac51824063474411834cf9cbce6da70c(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
return i
def func_b4990af39135459d96c62595ee20b9ab(N, S, A, q, p, r, s):
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
return i
def func_c449ec15464342e791ff3eacdb9abe1c(N, S, A, q, p, r, s):
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
return psum
def func_3f712eb9fb00484188a1911dd770f454(N, S, A, q, p, r, s):
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
return a
def func_81480aa91a6f42e282dffc0689cc6bbd(N, S, A, q, p, r, s):
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
return a
def func_27824dc4cda1459eaf436c38092b006e(N, S, A, q, p, r, s):
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
return mingain
def func_d2d49cb5c9aa4dd3b4ad0f2c3f17a3e1(N, S, A, q, p, r, s):
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
return i
def func_fdf9d2a17a5d4f4fa86afd0084854b96(N, S, A, q, p, r, s):
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
return psum
def func_b9cdaf3512f0487aa615150415b3eca9(psum):
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
return mingain
def func_dc400d407d514c66a0f00e41e8c416cc(psum):
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
return minpgain
def func_30abd218827d43d7ae7a1ba1f3b77afa(psum):
mingain = psum
minpgain = psum
hpi = 0
return minpgain
def func_57491aff8a7840fea20c04eb0465b1f4(psum):
mingain = psum
minpgain = psum
hpi = 0
return mingain
def func_057827decc524a3aa78b85e46c591133(psum):
mingain = psum
minpgain = psum
hpi = 0
return hpi
def func_e175c84bbb9e46a285c606fc1873e674(N, S, psum):
    """Sweep all ends i and split points j over prefix sums ``S``; return
    the final end index ``i``.

    Bug fix: ``mingain`` was read before assignment (UnboundLocalError); it
    is now seeded with ``psum`` as in the fully-initialised siblings.
    """
    minpgain = psum
    hpi = 0
    mingain = psum
    for i in range(N):
        j = hpi
        while j <= i:
            pgain = max(S[j], S[i] - S[j])
            gain = max(S[j], S[i] - S[j], psum - S[i])
            mingain = min(mingain, gain)
            if 2 * S[j] > S[i]:
                break
            if pgain <= minpgain:
                hpi = j
            j = j + 1
    return i
def func_7f4d45d037504a43952508ff400301ce(N, S, psum):
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return hpi
def func_d13c64122028415fa348138c5337f1a2(N, S, psum):
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return minpgain
def func_2c8d66b8e8494d7da50b3d0e55d7ed02(N, S, psum):
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return mingain
def func_36f780ec8fff4214a3d0d7e62811a8d6(N, S, psum):
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return gain
def func_56066b587d3542c79fbb1596310a0786(N, S, psum):
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return j
def func_c06ca9718d47411abe8e36bf61c063a2(N, S, psum):
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return pgain
def func_70a13f5600474ac199539dba7b92e4a7(N, S, minpgain, psum):
    """Sweep all ends i and split points j over prefix sums ``S``; return
    the best attainable fraction (psum - mingain) / psum.

    Bug fix: ``mingain`` was read before assignment (UnboundLocalError); it
    is now seeded with ``psum`` as in the fully-initialised siblings.
    """
    hpi = 0
    mingain = psum
    for i in range(N):
        j = hpi
        while j <= i:
            pgain = max(S[j], S[i] - S[j])
            gain = max(S[j], S[i] - S[j], psum - S[i])
            mingain = min(mingain, gain)
            if 2 * S[j] > S[i]:
                break
            if pgain <= minpgain:
                hpi = j
            j = j + 1
    return float(psum - mingain) / psum
def func_b47615c3c44b4e7b8e8b79bf9b340fe7(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
return i
def func_f25ec8382d17431a933c4425dbad69a6(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
return A
def func_92a7c17153554f5a8b888f0c99ccc908(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
return S
def func_c08cef70440541c4b5d7325f9778ff67(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
return psum
def func_8f3714cfe25f4fdf991d5f2e8c5b24d4(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
return a
def func_e1eb75ae65ec4a39af2292add3aa0f16(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
return i
def func_0eb884871b7246b5b153eae1c91876e0(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
return psum
def func_89ea100f3f1d4d37bdcc86501d628aa5(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
return S
def func_52f1bb55fd0a4989b6ae8428f0bfd465(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
return a
def func_b5e47067f352460981a68e75ac3b488a(N, S, A, q, p, r, s):
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
return psum
def func_a23f31349fff479da49c4318b8638a1e(N, S, A, q, p, r, s):
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
return mingain
def func_9b97e855cb174e73aee12347d68e4f27(N, S, A, q, p, r, s):
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
return a
def func_2217146b87934627b2d3ed27e7420c84(N, S, A, q, p, r, s):
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
return i
def func_5029b157c9f84c9898c818c24fc84347(N, S, A, q, p, r, s):
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
return a
def func_a0339e408849463c800a71a954f796ea(N, S, A, q, p, r, s):
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
return mingain
def func_83059db1c85e426eb8b59f8808e915bb(N, S, A, q, p, r, s):
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
return i
def func_614ea52b7064488a8de8a98ca6994015(N, S, A, q, p, r, s):
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
return psum
def func_28690fd1eb864bd094f2d2bc5b3ec402(N, S, A, q, p, r, s):
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
return minpgain
def func_dd24e6726a204177870c5e212346dfc4(psum):
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return minpgain
def func_7960889ab9c74e799253a4e83fed7fbb(psum):
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return mingain
def func_cf48a6db17b744bfbb8c939da541da93(psum):
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return hpi
def func_a7a0219037b94fe0b320520dcd8e4466(N, S, psum):
    """Run the two-pointer split sweep over prefix sums ``S``; return the
    last pairwise gain computed (requires N >= 1, as the original did)."""
    mingain = psum
    minpgain = psum
    hpi = 0
    for end in range(N):
        j = hpi
        while j <= end:
            left, right = S[j], S[end] - S[j]
            pgain = max(left, right)
            gain = max(left, right, psum - S[end])
            mingain = min(mingain, gain)
            if 2 * left > S[end]:
                break
            if pgain <= minpgain:
                hpi = j
            j += 1
    return pgain
def func_69794909e1c34da5972885ad766e0c8c(N, S, psum):
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return gain
def func_4efbaf10483540bc969bfa12fc937ea4(N, S, psum):
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return hpi
def func_aa2da8ae23014a5a9eb08730f0af5440(N, S, psum):
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return mingain
def func_2cc3e049fe7a4969a68a4b3008c96377(N, S, psum):
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return j
def func_8231678d745d4f19b9761387a05eebce(N, S, psum):
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return i
def func_03d3eac3541d4b5690de20bf0848fcc2(N, S, psum):
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return minpgain
def func_9460447054f641a6bc1cce0105fd27cf(N, S, psum):
    """Sweep all ends i and split points j over prefix sums ``S``; return
    the best attainable fraction (psum - mingain) / psum.

    Bug fix: ``mingain`` was read before assignment (UnboundLocalError); it
    is now seeded with ``psum`` as in the fully-initialised siblings.
    """
    minpgain = psum
    hpi = 0
    mingain = psum
    for i in range(N):
        j = hpi
        while j <= i:
            pgain = max(S[j], S[i] - S[j])
            gain = max(S[j], S[i] - S[j], psum - S[i])
            mingain = min(mingain, gain)
            if 2 * S[j] > S[i]:
                break
            if pgain <= minpgain:
                hpi = j
            j = j + 1
    return float(psum - mingain) / psum
def func_d0a08a9821a64712bb2ced4e603e702f(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
return S
def func_b709b84b68824f22ba16b7ec98ff8702(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
return psum
def func_2c4be757d97e49a59bfaa1fc43c50a27(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
return A
def func_02d871b3726d4e88b940c88e61b44ff7(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
return i
def func_46b38e1011024e409327e7f6b5ff25a9(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
return a
def func_04266cae184b41ba86d02095dd29f6a2(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
return S
def func_7152470275014f658fa59b4cdaeaea92(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
return psum
def func_b48c882d8e85448c958300c276cfa67e(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
return a
def func_4ffd2cd609444843856cca8538fac343(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
return mingain
def func_1439a4d9e7c64cdd9bd7077ac3a59089(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
return i
def func_5daf051d00124fd0a639fc7f54b08a09(N, S, A, q, p, r, s):
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
return minpgain
def func_1354288ccba84eed8ae92c512dac9a21(N, S, A, q, p, r, s):
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
return a
def func_0cfb74f92a4e4baf8f392b0dd53a8708(N, S, A, q, p, r, s):
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
return psum
def func_c028c7ad92004fe2b2372c862cb6ab5d(N, S, A, q, p, r, s):
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
return mingain
def func_ef7c8b79e2c845a18f0fee457574ebd2(N, S, A, q, p, r, s):
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
return i
def func_a1551ae7b9114cb08a9b6a3ea97b8abb(N, S, A, q, p, r, s):
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return mingain
def func_07f3c89b0c584115a6680d70d82b2926(N, S, A, q, p, r, s):
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return a
def func_c0538a6420194a83b0b383c39d2da05f(N, S, A, q, p, r, s):
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return psum
def func_9de1b83b1eca42b7b6bebe67ed3903e4(N, S, A, q, p, r, s):
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return hpi
def func_e0f0f052e14146c29ca1590a758acfa3(N, S, A, q, p, r, s):
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return minpgain
def func_fd26edd4faf84deab13e8c40cb29e74f(N, S, A, q, p, r, s):
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return i
def func_b3241811d60546289bdf7ee6fc9e20c7(N, S, psum):
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return j
def func_f937227ddfbc44dfa4dfe19838e3412c(N, S, psum):
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return pgain
def func_383d4bcf3d164e2797ac6d9e6de1741e(N, S, psum):
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return mingain
def func_238f1c452ea24086bd8c32190fffacf9(N, S, psum):
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return i
def func_9d91769de67448d2906e79d06cb05b55(N, S, psum):
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return minpgain
def func_d78fe9e4de30454cb8414e541a287f97(N, S, psum):
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return gain
def func_75109454bd8a45a283aca541cd94e2ba(N, S, psum):
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return hpi
def func_68bead40c66f48fdb5a670a3f2750593(N, S, psum):
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return float(psum - mingain) / psum
def func_f5962c052a794db48324e97566dfb89c(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
return a
def func_1830a3211fcf43318b039339a80d9ac0(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
return A
def func_5002d93c3ef046ffb9a973244d8c3e32(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
return i
def func_22ad3aae2a344fa497050d0a3edd8b84(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
return mingain
def func_f1a3569dd8924799be146a79bee1a0d5(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
return psum
def func_7f91a1185a474a76bfdb5d8a16e3b9ae(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
return S
def func_d91c9a4c0a9a496989efba0bda01db50(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
return S
def func_0db2f7f158824d4696913c2298349c87(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
return minpgain
def func_edec0b6f897941cabcea81d590e0c42d(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
return a
def func_2693877005bc425bb5d10d50ba184b66(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
return mingain
def func_1ce5638f9bdd4f8aba7b70dc06e5b47f(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
return i
def func_66e6367549a14b1ea1af5a9fdf4a18f2(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
return psum
def func_fbc8600b5e19405494a9e67aff3e958c(N, S, A, q, p, r, s):
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return hpi
def func_87d5f065e44c43d5a92d0865ce67b846(N, S, A, q, p, r, s):
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return a
def func_4e43a58b6a98411a9807218eef1737ea(N, S, A, q, p, r, s):
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return mingain
def func_d6685b452db2449f808a44b0134871c2(N, S, A, q, p, r, s):
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return minpgain
def func_38244230e4e1401b95efbb92e53bda2e(N, S, A, q, p, r, s):
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return psum
def func_31c61452d8d042e5987f7c997a9e82fd(N, S, A, q, p, r, s):
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return i
def func_615debe1e52c4d5e9c45b79969794e81(N, S, A, q, p, r, s):
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return j
def func_80c1841ab60c4ca1aff6524d5392324b(N, S, A, q, p, r, s):
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return psum
def func_e714ff5f98e04d81b84a009f24311324(N, S, A, q, p, r, s):
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return mingain
def func_f9fcba341f564c719cee8c10a032a601(N, S, A, q, p, r, s):
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return pgain
def func_4936dd9d4f07490e8cc9cc49604d3768(N, S, A, q, p, r, s):
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return a
def func_9d6a3f375ce746cbbca5831b3322a123(N, S, A, q, p, r, s):
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return i
def func_25823d7a17fe4a0d9d7274801a34c785(N, S, A, q, p, r, s):
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return gain
def func_11f18fe4e49544389c0b1b0080a9bbc3(N, S, A, q, p, r, s):
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return minpgain
def func_77b0dd0f5ece42d8bdb602d57aa8d0d9(N, S, A, q, p, r, s):
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return hpi
def func_bfca667e3c6c48a0856c692d8a707e80(N, S, psum):
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return float(psum - mingain) / psum
def func_a1219e3a67b64b6c84d15b38f19bdc0f(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
return psum
def func_5fb2311d68394da088bbb77042ae3f39(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
return a
def func_5baf0148fdbc41c5a0884af3c8b9488b(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
return A
def func_6f530fbee79943b1941bd8d51128f7ad(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
return mingain
def func_14b40d2d5796402bb55efe34c514758d(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
return i
def func_969a285cfd9042ab88335e7ed7b4df66(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
return S
def func_59a299a5ebcf4967a956d9d122584dba(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
return minpgain
def func_e304ea182c4b42999a07238ef4fc8a44(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return a
def func_0b5a8e2f2eee466d8ae95c6f156cae38(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return hpi
def func_03347800e1b44621be7ac99d75e51117(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return minpgain
def func_10bb7af27bbd4273ac65078cfb075a37(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return S
def func_f44bff9274064b568f97c4d9cb7c22a2(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return mingain
def func_163d112482f546ac8b79335b3e42388d(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return i
def func_802d188bb9dd4f0fa2e896e30ac6940f(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return psum
def func_36afc1157b3a4d98973283c89e551c95(N, S, A, q, p, r, s):
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return a
def func_34db1e8092a14fa8bb6106c1aead6c2d(N, S, A, q, p, r, s):
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return i
def func_22b6c464349b459198d3f0a1c2878bb4(N, S, A, q, p, r, s):
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return pgain
def func_7b4840b9ffdc410eacaea1af315eba1c(N, S, A, q, p, r, s):
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return gain
def func_ec4a1bd36c564bdab45625216eddba75(N, S, A, q, p, r, s):
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return psum
def func_5d2c937a2b9f4e78b187542550dc2517(N, S, A, q, p, r, s):
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return mingain
def func_88933903641a4219bfcd3fe481eccb77(N, S, A, q, p, r, s):
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return minpgain
def func_3b0afafda89a43e2bb3a9980d5bb8f82(N, S, A, q, p, r, s):
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return hpi
def func_a37cc5227aa7488a9be87102c356b008(N, S, A, q, p, r, s):
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return j
def func_2eb67baef7324be29129845893004761(N, S, A, q, p, r, s):
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return float(psum - mingain) / psum
def func_f266123b0c4846998dd359ed4bdb5b84(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return psum
def func_925a3ebc9fd440a482fdf727841ae576(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return A
def func_adbbf3b510fd40d4b4e6214ec6b2e658(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return S
def func_714d2f270e304256a669a737f70b05ec(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return mingain
def func_023b1282d89b4658a884376e64c37278(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return minpgain
def func_2af9cbec022e44c89e114a5ce4b48ef0(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return i
def func_2f109fa61b7546c6856d48d9e2769ffc(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return a
def func_71142994bb33474398dea0fc8f2a326e(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
return hpi
def func_7058c4e1ed1d44e89960c28f84290b4f(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return gain
def func_6579940ed29f481794db86541902a629(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return minpgain
def func_c3fe3ca4797c4f7885494ad61968c3c6(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return hpi
def func_7d23a97a960e4b70ab90c416cb5dc552(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return psum
def func_ca17dd55efb041c7a2849793e25916ab(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return a
def func_f9d0fe1c076f46f193c2517448dac04a(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return pgain
def func_016dfbfebd444e53b2f148aff80505dc(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return i
def func_34ad324c059547b5b04b4756f95ca337(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return j
def func_426213315ea7410a94fb09ec4513b86e(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return S
def func_7931e2dc00de4fc3ae7fb5b2a530f251(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return mingain
def func_5f1c8567a4ca4b47b0ce58629de7df8e(N, S, A, q, p, r, s):
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return float(psum - mingain) / psum
def func_f175bba566a549b8ab301ca384863ec3(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return a
def func_0035aba98d68485abf7b6f3a94431c81(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return mingain
def func_e1a3b33b2d3f42e8b8ff9694a796ae7b(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return S
def func_03cecdc8dae04e27bb969d108b458f97(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return hpi
def func_082af1d6e6534c459e4d3fd0f6c453f1(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return pgain
def func_9ef3de985828496ea61225cb3a168cab(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return psum
def func_14c9a5f472f94ebe8b6e49475439dcd6(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return j
def func_dd84a6414ece4f57bdff9140824c530e(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return gain
def func_b6e4ba192f3348439b1b3727df1968a1(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return minpgain
def func_6f5ad5fb2e174f73bb4c951cc96c585d(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return i
def func_e3780b0a05cc4fb8b8ffdc236f22954f(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return A
def func_abfc3aebb400425799aff89bf84f7781(N, A, q, p, r, s):
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return float(psum - mingain) / psum
def func_25cf29aa45eb4953933671bb51a0d73a(N, q, p, r, s):
A = []
S = []
psum = 0
for i in range(N):
a = s + (i * p + q) % r
A.append(a)
psum = psum + a
S.append(psum)
"""mingain = psum
for i in range(N):
for j in range(i+1):
gain = max(S[j],S[i]-S[j],psum-S[i])
mingain = min(mingain,gain)
return float(psum-mingain)/psum"""
mingain = psum
minpgain = psum
hpi = 0
for i in range(N):
j = hpi
while j <= i:
pgain = max(S[j], S[i] - S[j])
gain = max(S[j], S[i] - S[j], psum - S[i])
mingain = min(mingain, gain)
if 2 * S[j] > S[i]:
break
if pgain <= minpgain:
hpi = j
j = j + 1
return float(psum - mingain) / psum
def func_e386ba13b5e6444d857791be90564075():
infile = open('codejam/test_files/Y14R5P1/A.in')
T = int(infile.readline())
return T
def func_3ad2304ed4c845ac9a89aa5f56d752ea():
infile = open('codejam/test_files/Y14R5P1/A.in')
T = int(infile.readline())
return infile
def func_8505bca3c5a94d64a05c79143850f6e7(infile):
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
return r
def func_b76665a33a3a4cc4b62f81e11949867b(infile):
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
return q
def func_b7a2172a208b425992a390d579f4816a(infile):
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
return s
def func_07ebb54ae41a439f97ef84edabe07d3b(infile):
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
return T
def func_304baf99a0574262b7f0caf7d4eff9f3(infile):
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
return t
def func_d44f2afc173d4cb28f35c9e4b0bdc7f0(infile):
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
return N
def func_34d4f8497c234262a9d2ef200655977e(infile):
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
return p
def func_b168879d54db44f8b434bde7a17a762b(T, infile):
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
infile.close()
return N
def func_24f7c1edc9e24007a1ece13da11bc5ba(T, infile):
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
infile.close()
return t
def func_dfa4e25f70364c17bdbb73b6703a79e3(T, infile):
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
infile.close()
return r
def func_4916594f3be0497db895604237ee47a3(T, infile):
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
infile.close()
return p
def func_4f871ae77902499bb0f9c5a6af8ca118(T, infile):
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
infile.close()
return q
def func_511d5ed4b0da47b7a7261330883e77db(T, infile):
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
infile.close()
return s
def func_492b7b2ff8b84d8d817c2414184bea3f():
infile = open('codejam/test_files/Y14R5P1/A.in')
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
return r
def func_78829a7e763d4429a2ee474821c369a5():
infile = open('codejam/test_files/Y14R5P1/A.in')
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
return t
def func_3c768c58687a45f080dfb73d6cc7b833():
infile = open('codejam/test_files/Y14R5P1/A.in')
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
return infile
def func_220a5723e0e7427a90183154b850c688():
infile = open('codejam/test_files/Y14R5P1/A.in')
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
return T
def func_b8fa2e4effa8495d916ce456f54f5736():
infile = open('codejam/test_files/Y14R5P1/A.in')
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
return q
def func_51c9c93f290947e1b368c81f64dc4095():
infile = open('codejam/test_files/Y14R5P1/A.in')
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
return s
def func_a32461e0c1654763b1f0be6030e2c52b():
infile = open('codejam/test_files/Y14R5P1/A.in')
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
return N
def func_5292248012c74ccebc933ea35b58a0e9():
infile = open('codejam/test_files/Y14R5P1/A.in')
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
return p
def func_d74ac30fac404082b82c8b16a56f7483(infile):
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
infile.close()
return t
def func_05c94f28678248aeafe70985e3327dd0(infile):
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
infile.close()
return s
def func_d9cec4d5b73842d98865e44cab92630c(infile):
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
infile.close()
return r
def func_8cfcf581fa694333b13159edb9a2d857(infile):
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
infile.close()
return p
def func_c5fcb53f14fb4b8d869502a0de9bf295(infile):
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
infile.close()
return T
def func_7b8a7f5d05744fe79c987722ca11c464(infile):
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
infile.close()
return N
def func_6b61f21bde9f4a6a93cfcac94ab6c9ae(infile):
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
infile.close()
return q
def func_a018108d85c6493abb158d11c80ba5ab():
infile = open('codejam/test_files/Y14R5P1/A.in')
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
infile.close()
return N
def func_f624f0afe0ad4530b03774e621d5d86a():
infile = open('codejam/test_files/Y14R5P1/A.in')
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
infile.close()
return p
def func_d536ffe452c94b92b1b232032359a975():
infile = open('codejam/test_files/Y14R5P1/A.in')
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
infile.close()
return infile
def func_1203201756974e1dbb0318ee53c8cdaf():
infile = open('codejam/test_files/Y14R5P1/A.in')
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
infile.close()
return r
def func_9bcce734072047fea47192e11fab0dc3():
infile = open('codejam/test_files/Y14R5P1/A.in')
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
infile.close()
return t
def func_8f07af81f01e457ca3cbcba0182d612f():
infile = open('codejam/test_files/Y14R5P1/A.in')
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
infile.close()
return q
def func_3b0fc402dab640b09a2c5a54b0167d0b():
infile = open('codejam/test_files/Y14R5P1/A.in')
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
infile.close()
return s
def func_35a3f4a8084f4dfeab38d2273935e6fa():
infile = open('codejam/test_files/Y14R5P1/A.in')
T = int(infile.readline())
for t in range(T):
N, p, q, r, s = map(int, infile.readline().split())
print 'Case #{}: {}'.format(t + 1, solve(N, p, q, r, s))
infile.close()
return T
| 24.605706 | 86 | 0.49808 |
b4578c34b04c3585b1328f3f297594c929cc1188 | 7,782 | py | Python | tools/nni_cmd/tensorboard_utils.py | lawwu/nni | b869dd48dfe36392e7b78c70ea35eb6d4b4779dc | [
"MIT"
] | 2 | 2019-07-08T10:02:36.000Z | 2019-07-08T10:05:27.000Z | tools/nni_cmd/tensorboard_utils.py | lawwu/nni | b869dd48dfe36392e7b78c70ea35eb6d4b4779dc | [
"MIT"
] | 16 | 2020-01-28T22:44:42.000Z | 2022-02-10T00:20:32.000Z | tools/nni_cmd/tensorboard_utils.py | lawwu/nni | b869dd48dfe36392e7b78c70ea35eb6d4b4779dc | [
"MIT"
] | 1 | 2019-11-29T08:56:14.000Z | 2019-11-29T08:56:14.000Z | # Copyright (c) Microsoft Corporation
# All rights reserved.
#
# MIT License
#
# Permission is hereby granted, free of charge,
# to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and
# to permit persons to whom the Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import os
import psutil
import json
import datetime
import time
from subprocess import call, check_output, Popen, PIPE
from .rest_utils import rest_get, rest_delete, check_rest_server_quick, check_response
from .config_utils import Config, Experiments
from .url_utils import trial_jobs_url, experiment_url, trial_job_id_url, get_local_urls
from .constants import NNICTL_HOME_DIR, EXPERIMENT_INFORMATION_FORMAT, EXPERIMENT_DETAIL_FORMAT, COLOR_GREEN_FORMAT
import time
from .common_utils import print_normal, print_error, print_warning, detect_process, detect_port
from .nnictl_utils import *
import re
from .ssh_utils import create_ssh_sftp_client, copy_remote_directory_to_local
import tempfile
def parse_log_path(args, trial_content):
'''parse log path'''
path_list = []
host_list = []
for trial in trial_content:
if args.trial_id and args.trial_id != 'all' and trial.get('id') != args.trial_id:
continue
pattern = r'(?P<head>.+)://(?P<host>.+):(?P<path>.*)'
match = re.search(pattern,trial['logPath'])
if match:
path_list.append(match.group('path'))
host_list.append(match.group('host'))
if not path_list:
print_error('Trial id %s error!' % args.trial_id)
exit(1)
return path_list, host_list
def copy_data_from_remote(args, nni_config, trial_content, path_list, host_list, temp_nni_path):
'''use ssh client to copy data from remote machine to local machien'''
machine_list = nni_config.get_config('experimentConfig').get('machineList')
machine_dict = {}
local_path_list = []
for machine in machine_list:
machine_dict[machine['ip']] = {'port': machine['port'], 'passwd': machine['passwd'], 'username': machine['username']}
for index, host in enumerate(host_list):
local_path = os.path.join(temp_nni_path, trial_content[index].get('id'))
local_path_list.append(local_path)
print_normal('Copying log data from %s to %s' % (host + ':' + path_list[index], local_path))
sftp = create_ssh_sftp_client(host, machine_dict[host]['port'], machine_dict[host]['username'], machine_dict[host]['passwd'])
copy_remote_directory_to_local(sftp, path_list[index], local_path)
print_normal('Copy done!')
return local_path_list
def get_path_list(args, nni_config, trial_content, temp_nni_path):
'''get path list according to different platform'''
path_list, host_list = parse_log_path(args, trial_content)
platform = nni_config.get_config('experimentConfig').get('trainingServicePlatform')
if platform == 'local':
print_normal('Log path: %s' % ' '.join(path_list))
return path_list
elif platform == 'remote':
path_list = copy_data_from_remote(args, nni_config, trial_content, path_list, host_list, temp_nni_path)
print_normal('Log path: %s' % ' '.join(path_list))
return path_list
else:
print_error('Not supported platform!')
exit(1)
def format_tensorboard_log_path(path_list):
new_path_list = []
for index, value in enumerate(path_list):
new_path_list.append('name%d:%s' % (index + 1, value))
return ','.join(new_path_list)
def start_tensorboard_process(args, nni_config, path_list, temp_nni_path):
'''call cmds to start tensorboard process in local machine'''
if detect_port(args.port):
print_error('Port %s is used by another process, please reset port!' % str(args.port))
exit(1)
with open(os.path.join(temp_nni_path, 'tensorboard_stdout'), 'a+') as stdout_file, open(os.path.join(temp_nni_path, 'tensorboard_stderr'), 'a+') as stderr_file:
cmds = ['tensorboard', '--logdir', format_tensorboard_log_path(path_list), '--port', str(args.port)]
tensorboard_process = Popen(cmds, stdout=stdout_file, stderr=stderr_file)
url_list = get_local_urls(args.port)
print_normal(COLOR_GREEN_FORMAT % 'Start tensorboard success!\n' + 'Tensorboard urls: ' + ' '.join(url_list))
tensorboard_process_pid_list = nni_config.get_config('tensorboardPidList')
if tensorboard_process_pid_list is None:
tensorboard_process_pid_list = [tensorboard_process.pid]
else:
tensorboard_process_pid_list.append(tensorboard_process.pid)
nni_config.set_config('tensorboardPidList', tensorboard_process_pid_list)
def stop_tensorboard(args):
'''stop tensorboard'''
experiment_id = check_experiment_id(args)
experiment_config = Experiments()
experiment_dict = experiment_config.get_all_experiments()
config_file_name = experiment_dict[experiment_id]['fileName']
nni_config = Config(config_file_name)
tensorboard_pid_list = nni_config.get_config('tensorboardPidList')
if tensorboard_pid_list:
for tensorboard_pid in tensorboard_pid_list:
try:
cmds = ['kill', '-9', str(tensorboard_pid)]
call(cmds)
except Exception as exception:
print_error(exception)
nni_config.set_config('tensorboardPidList', [])
print_normal('Stop tensorboard success!')
else:
print_error('No tensorboard configuration!')
def start_tensorboard(args):
'''start tensorboard'''
experiment_id = check_experiment_id(args)
experiment_config = Experiments()
experiment_dict = experiment_config.get_all_experiments()
config_file_name = experiment_dict[experiment_id]['fileName']
nni_config = Config(config_file_name)
rest_port = nni_config.get_config('restServerPort')
rest_pid = nni_config.get_config('restServerPid')
if not detect_process(rest_pid):
print_error('Experiment is not running...')
return
running, response = check_rest_server_quick(rest_port)
trial_content = None
if running:
response = rest_get(trial_jobs_url(rest_port), REST_TIME_OUT)
if response and check_response(response):
trial_content = json.loads(response.text)
else:
print_error('List trial failed...')
else:
print_error('Restful server is not running...')
if not trial_content:
print_error('No trial information!')
exit(1)
if len(trial_content) > 1 and not args.trial_id:
print_error('There are multiple trials, please set trial id!')
exit(1)
experiment_id = nni_config.get_config('experimentId')
temp_nni_path = os.path.join(tempfile.gettempdir(), 'nni', experiment_id)
os.makedirs(temp_nni_path, exist_ok=True)
path_list = get_path_list(args, nni_config, trial_content, temp_nni_path)
start_tensorboard_process(args, nni_config, path_list, temp_nni_path)
| 47.45122 | 164 | 0.721023 |
ef58a0e2deef128ebd96ae93e26287107f7cadfd | 1,053 | py | Python | b_cfn_custom_userpool_authorizer/source/policy_document.py | simonasvaitkus/B.CfnCustomUserPoolAuthorizer | 6532665d3b29031f86c19205cd58b0bcd9e4f827 | [
"Apache-2.0"
] | null | null | null | b_cfn_custom_userpool_authorizer/source/policy_document.py | simonasvaitkus/B.CfnCustomUserPoolAuthorizer | 6532665d3b29031f86c19205cd58b0bcd9e4f827 | [
"Apache-2.0"
] | null | null | null | b_cfn_custom_userpool_authorizer/source/policy_document.py | simonasvaitkus/B.CfnCustomUserPoolAuthorizer | 6532665d3b29031f86c19205cd58b0bcd9e4f827 | [
"Apache-2.0"
] | null | null | null | from typing import Dict, Any
class PolicyDocument:
"""
Policy document that is constructed according to this documentation:
https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-lambda-authorizer-output.html
"""
def __init__(
self,
region: str,
account_id: str,
api_id: str
) -> None:
self.region = region
self.account_id = account_id
self.api_id = api_id
def create_policy_statement(
self,
allow: bool = False
) -> Dict[str, Any]:
return {
'principalId': 'user',
'policyDocument': {
'Version': '2012-10-17',
'Statement': [
{
'Action': 'execute-api:Invoke',
'Resource': f'arn:aws:execute-api:{self.region}:{self.account_id}:{self.api_id}/*/*',
'Effect': 'Allow' if allow else 'Deny'
}
]
}
}
| 29.25 | 109 | 0.488129 |
243fade710378b8a36ab7c0f62d54e2b230afe5d | 4,799 | py | Python | bridge_env.py | wuminghui100/bridge-maintenance-optimizer | 1f4b7a5e74a56a508f33acb8ea0c7a7cbc870e9d | [
"MIT"
] | null | null | null | bridge_env.py | wuminghui100/bridge-maintenance-optimizer | 1f4b7a5e74a56a508f33acb8ea0c7a7cbc870e9d | [
"MIT"
] | null | null | null | bridge_env.py | wuminghui100/bridge-maintenance-optimizer | 1f4b7a5e74a56a508f33acb8ea0c7a7cbc870e9d | [
"MIT"
] | null | null | null | import numpy as np
from utils import time_encoder, processsa
import math,os
from model_generate import Predictor
class BridgeEnv:
def __init__(self):
self.time = 0
self.n_features = 12
self.action_space = [0,1,2,3]
self.n_actions = len(self.action_space)
self.state = self.reset()
self.state_num = np.where(self.state[0:5]==1)
self.lifereward_his = []
self.trans_his = np.zeros(shape=[10,4,5,5])
self.trans_real = np.zeros(shape=[10,4,5,5])
self.pred = Predictor(1, co_phy=2)
#import historical transition from Predicor
self.trans_his = self.pred.transition
self.his_pred_accuracy = self.pred.accuracy
#import real transition model from scenario files
path = 'scenarios\\scenario6.npy'
dirpath = os.path.dirname(os.path.abspath(__file__))
path = os.path.join(dirpath, path)
self.trans_real = np.load(path)
def reset(self):
state = np.zeros(self.n_features, dtype=np.int32)
state[4] = 1
self.time = 0
_year = np.zeros(7, dtype = np.int32)
_year = time_encoder(self.time)
state[5:12] = _year
self.state = state
self.state_num = 4
return self.state
def render(self):
print(self.state)
def cost_normalize(self, cost):
return (6.57e-7)*cost-0.2
def cost_denormalize(self, cost_n):
return (cost_n+0.2)/(6.57e-7)
def costs(self, state, action, fail):
a_costlist = [0, 70434, 251606, 732799]
s_failpossibility = [0.1, 0.06, 0.01, 0.003, 0.0001]
failcost = 1976947
'''
if fail:
cost = a_costlist[action]+s_failpossibility[state]*failcost+failcost
else:
cost = a_costlist[action]+s_failpossibility[state]*failcost
'''
if fail:
cost = a_costlist[action]+failcost
else:
cost = a_costlist[action]
return self.cost_normalize(cost)
def spending(self, action, fail):
#real expenditure
#assume theory suits well with real world case
a_costlist = [0, 70434, 251606, 732799]
failcost = 1976947
if fail:
cost = a_costlist[action]+failcost
else:
cost = a_costlist[action]
return cost
def step(self, action, render=False, real_world=False, noChange=False):
if noChange == False:
self.time += 2
#print(self.time)
if self.time>=100:
done = True
else:
done = False
#transition model
trans = np.zeros(shape=[4,5,5])
#theoritical result based on data from other bridges
if real_world==False:
period = math.floor((self.time-1)/10.0)
trans = self.trans_his[period]
#real world transition based on reasonable assumption
#still need more subtle design
else:
period = math.floor((self.time-1)/10.0)
trans = self.trans_real[period]
# deterioration
sindex = self.state_num
_sindex = np.random.choice(5, 1, p=trans[action, sindex, :])
# failure
s_failpossibility = [0.1, 0.06, 0.01, 0.003, 0.0001]
if np.random.rand()<s_failpossibility[int(_sindex)]:
# the deck system break down
fail = True
else:
fail = False
#reward, decided by the action and the possible failure
reward = -self.costs(int(_sindex), action, fail)
# if fail, the rebuild will lead the state back to state 4
if fail:
_state = 4
else:
_state = int(_sindex)
if render:
self.render()
if noChange == False:
# not prediction in safeQLearning
# the state will change
self.state_num = _state
self.state[0:5] = processsa(self.state_num)
self.state[5:12] = time_encoder(self.time)
return self.state, reward, fail, done
else:
# state won't change
# return the predicted next state
state_num_vir = _state
state_vir = np.zeros(self.n_features, dtype=np.int32)
state_vir[0:5] = processsa(state_num_vir)
state_vir[5:12] = time_encoder(self.time+2)
return state_vir, reward, fail, done
def plot_lifecost(self):
import matplotlib.pyplot as plt
plt.plot(np.arange(len(self.lifereward_his)), self.lifereward_his)
plt.ylabel('Lifereward')
plt.xlabel('episode/100')
plt.show() | 34.278571 | 81 | 0.562826 |
35a003b7613e1688cd576803407bef20894a773d | 1,598 | py | Python | setup.py | definitelyprobably/alnitak | 9e0413ce4c7408a8516570c980def27ebe8ee6c3 | [
"MIT"
] | null | null | null | setup.py | definitelyprobably/alnitak | 9e0413ce4c7408a8516570c980def27ebe8ee6c3 | [
"MIT"
] | null | null | null | setup.py | definitelyprobably/alnitak | 9e0413ce4c7408a8516570c980def27ebe8ee6c3 | [
"MIT"
] | null | null | null | from setuptools import setup, find_packages
import alnitak
from os import path
this_directory = path.abspath(path.dirname(__file__))
with open(path.join(this_directory, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='alnitak',
version=alnitak.__version__,
packages=find_packages(),
setup_requires=[
'pytest-runner',
'requests>=2.21.0',
'cryptography>=2.4.2',
],
install_requires=[
'requests>=2.21.0',
'cryptography>=2.4.2',
],
tests_require=[ 'pytest' ],
entry_points={ 'console_scripts': [ 'alnitak = alnitak.main:main' ], },
author='K. S. Kooner',
author_email='ksa.kooner@gmail.com',
license='MIT',
url='https://github.com/definitelyprobably/alnitak',
description='Create and manage DANE (DNS TLSA) records',
long_description=long_description,
keywords="dane tlsa",
platforms="Linux, POSIX",
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Console",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Intended Audience :: System Administrators",
"Topic :: Internet",
"Topic :: Security",
"Topic :: System :: Systems Administration",
"Topic :: Utilities",
],
)
| 28.035088 | 75 | 0.586358 |
113f3a77047aa9b0543802e27aa945a6537c4cd4 | 334 | py | Python | arquivos-py/CursoEmVideo_Python3_AULAS/aula-14d.py | oliveiralecca/cursoemvideo-python3 | e0a3e27d73a49ce0e72ae4faa9ac0c6da9811d2e | [
"MIT"
] | 2 | 2021-07-14T13:22:36.000Z | 2021-07-18T01:28:21.000Z | arquivos-py/CursoEmVideo_Python3_AULAS/aula-14d.py | oliveiralecca/cursoemvideo-python3 | e0a3e27d73a49ce0e72ae4faa9ac0c6da9811d2e | [
"MIT"
] | 2 | 2021-07-18T01:09:26.000Z | 2021-07-18T01:30:23.000Z | arquivos-py/CursoEmVideo_Python3_AULAS/aula-14d.py | oliveiralecca/cursoemvideo-python3 | e0a3e27d73a49ce0e72ae4faa9ac0c6da9811d2e | [
"MIT"
] | null | null | null | n = 1
par = impar = 0
while n != 0:
n = int(input('Digite um número: '))
if n != 0: # PARA QUE O 0 QUE INDICA O FIM DO PROGRAMA, NÃO SEJA CONSIDERADO NA CONTAGEM!
if n % 2 == 0:
par += 1
else:
impar += 1
print('Você digitou {} números PARES e {} números ÍMPARES!'.format(par, impar))
| 30.363636 | 95 | 0.541916 |
d0055329dca7c68c3a82bd642ce915d58016b98e | 370 | py | Python | session-bot/bot/utils.py | Chasbob/session-bot | a7090e11a16801bd74816c690cb62e19d6fcc2f4 | [
"MIT"
] | null | null | null | session-bot/bot/utils.py | Chasbob/session-bot | a7090e11a16801bd74816c690cb62e19d6fcc2f4 | [
"MIT"
] | null | null | null | session-bot/bot/utils.py | Chasbob/session-bot | a7090e11a16801bd74816c690cb62e19d6fcc2f4 | [
"MIT"
] | null | null | null | import random
from typing import List
import discord
async def add_reactions(message, emojis: List[discord.Emoji] = None):
if emojis is None or len(emojis) < 4:
emojis = ["💻", "🙌", "🔥", "💯", "🍕", "🎉", "🥳", "💡", "📣"]
else:
emojis = emojis.copy()
random.shuffle(emojis)
for emoji in emojis[:4]:
await message.add_reaction(emoji)
| 24.666667 | 69 | 0.583784 |
61a3b5e5e45d9917a885b9c7ab4547ed3c3b107d | 4,403 | py | Python | api/app/models/click_factory.py | meedan/vframe | 736f46767c587dd70fc50f5e5783c0a64b6f51e1 | [
"MIT"
] | 2 | 2019-05-25T20:48:29.000Z | 2019-07-10T12:42:17.000Z | api/app/models/click_factory.py | meedan/vframe | 736f46767c587dd70fc50f5e5783c0a64b6f51e1 | [
"MIT"
] | 1 | 2019-12-11T14:53:07.000Z | 2019-12-11T14:53:07.000Z | api/app/models/click_factory.py | meedan/vframe | 736f46767c587dd70fc50f5e5783c0a64b6f51e1 | [
"MIT"
] | null | null | null | """
Click processor factory
- Inspired by and used code from @wiretapped's HTSLAM codebase
- In particular the very useful
"""
import os
import sys
from os.path import join
from pathlib import Path
import os
from os.path import join
import sys
from functools import update_wrapper, wraps
import itertools
from pathlib import Path
from glob import glob
import importlib
import logging
import click
from app.settings import app_cfg as cfg
# --------------------------------------------------------
# Click Group Class
# --------------------------------------------------------
# set global variable during parent class create
dir_plugins = None # set in create
class ClickComplex:
"""Wrapper generator for custom Click CLI's based on LR's coroutine"""
def __init__(self):
pass
class CustomGroup(click.Group):
#global dir_plugins # from CliGenerator init
# lists commands in plugin directory
def list_commands(self, ctx):
global dir_plugins # from CliGenerator init
rv = list(self.commands.keys())
fp_cmds = [Path(x) for x in Path(dir_plugins).iterdir() \
if str(x).endswith('.py') \
and '__init__' not in str(x)]
for fp_cmd in fp_cmds:
try:
assert fp_cmd.name not in rv, "[-] Error: {} can't exist in cli.py and {}".format(fp_cmd.name)
except Exception as ex:
logging.getLogger('app').error('{}'.format(ex))
rv.append(fp_cmd.stem)
rv.sort()
return rv
# Complex version: gets commands in directory and in this file
# Based on code from @wiretapped + HTSLAM
def get_command(self, ctx, cmd_name):
global dir_plugins
if cmd_name in self.commands:
return self.commands[cmd_name]
ns = {}
fpp_cmd = Path(dir_plugins, cmd_name + '.py')
fp_cmd = fpp_cmd.as_posix()
if not fpp_cmd.exists():
sys.exit('[-] {} file does not exist'.format(fpp_cmd))
code = compile(fpp_cmd.read_bytes(), fp_cmd, 'exec')
try:
eval(code, ns, ns)
except Exception as ex:
logging.getLogger('vframe').error('exception: {}'.format(ex))
@click.command()
def _fail():
raise Exception('while loading {}'.format(fpp_cmd.name))
_fail.short_help = repr(ex)
_fail.help = repr(ex)
return _fail
if 'cli' not in ns:
sys.exit('[-] Error: {} does not contain a cli function'.format(fp_cmd))
return ns['cli']
@classmethod
def create(self, dir_plugins_local):
global dir_plugins
dir_plugins = dir_plugins_local
return self.CustomGroup
class ClickSimple:
"""Wrapper generator for custom Click CLI's"""
def __init__(self):
pass
class CustomGroup(click.Group):
#global dir_plugins # from CliGenerator init
# lists commands in plugin directory
def list_commands(self, ctx):
global dir_plugins # from CliGenerator init
rv = list(self.commands.keys())
fp_cmds = [Path(x) for x in Path(dir_plugins).iterdir() \
if str(x).endswith('.py') \
and '__init__' not in str(x)]
for fp_cmd in fp_cmds:
assert fp_cmd.name not in rv, "[-] Error: {} can't exist in cli.py and {}".format(fp_cmd.name)
rv.append(fp_cmd.stem)
rv.sort()
return rv
# Complex version: gets commands in directory and in this file
# from HTSLAM
def get_command(self, ctx, cmd_name):
global dir_plugins # from CliGenerator init
if cmd_name in self.commands:
return self.commands[cmd_name]
ns = {}
fpp_cmd = Path(dir_plugins, cmd_name + '.py')
fp_cmd = fpp_cmd.as_posix()
if not fpp_cmd.exists():
sys.exit('[-] {} file does not exist'.format(fpp_cmd))
code = compile(fpp_cmd.read_bytes(), fp_cmd, 'exec')
try:
eval(code, ns, ns)
except Exception as ex:
logging.getLogger('vframe').error('exception: {}'.format(ex))
@click.command()
def _fail():
raise Exception('while loading {}'.format(fpp_cmd.name))
_fail.short_help = repr(ex)
_fail.help = repr(ex)
return _fail
if 'cli' not in ns:
sys.exit('[-] Error: {} does not contain a cli function'.format(fp_cmd))
return ns['cli']
@classmethod
def create(self, dir_plugins_local):
global dir_plugins
dir_plugins = dir_plugins_local
return self.CustomGroup
| 30.157534 | 104 | 0.626618 |
5b9329620d9883de9922163dc970fd09a86c75bf | 7,070 | py | Python | apps/dashboard/widgets.py | mariusaarsnes/onlineweb4 | 3495321dabfd7a7236e6d841b004e9f855b6f30e | [
"MIT"
] | null | null | null | apps/dashboard/widgets.py | mariusaarsnes/onlineweb4 | 3495321dabfd7a7236e6d841b004e9f855b6f30e | [
"MIT"
] | null | null | null | apps/dashboard/widgets.py | mariusaarsnes/onlineweb4 | 3495321dabfd7a7236e6d841b004e9f855b6f30e | [
"MIT"
] | null | null | null | # -*- coding: utf8 -*-
#
# Created by 'myth' on 10/18/15
from django.forms.utils import format_html
from django.forms.widgets import TextInput
from django.utils.encoding import force_text
DATEPICKER_WIDGET_STRING = """
<div class="input-group dp">\r\n
<span class="input-group-btn datepickerbutton">\r\n
<a href="#" class="btn btn-primary">\r\n
<i class="fa fa-calendar fa-lg"></i></a></span>\r\n
<input class="form-control" id="{id}" name="{name}" type="text" placeholder="{placeholder}" value="{value}" />\r\n
</div>\r\n
"""
DATETIMEPICKER_WIDGET_STRING = """
<div class="input-group dtp">\r\n
<span class="input-group-btn datepickerbutton">\r\n
<a href="#" class="btn btn-primary">\r\n
<i class="fa fa-calendar fa-lg"></i></a></span>\r\n
<input class="form-control" id="{id}" name="{name}" type="text" placeholder="{placeholder}" value="{value}" />\r\n
</div>\r\n
"""
TIMEPICKER_WIDGET_STRING = """
<div class="input-group tp">\r\n
<span class="input-group-btn datepickerbutton">\r\n
<a href="#" class="btn btn-primary">\r\n
<i class="fa fa-calendar fa-lg"></i></a></span>\r\n
<input class="form-control" id="{id}" name="{name}" type="text" placeholder="{placeholder}" value="{value}" />\r\n
</div>\r\n
"""
def widget_generator(klass, fields):
"""
Takes in a class and a list of tuples consisting of field_name and an attribute dictionary
:param klass: Class of the input widget
:param fields: List of tuples mapping field names to attribute dictionaries
:return: A dict of input widget instances
"""
widgets = {}
for field_name, attrs in fields:
widgets[field_name] = klass(attrs=attrs)
return widgets
def multiple_widget_generator(klass_field_tuples):
"""
The multiple widget generator takes in a list of tuples consisting of a field widget class and
a list of field and attribute tuples for each field.
Example usage:
dtp_fields = [
('published', {'class': 'highlighted'}),
('updated', {})
]
responsive_img_fields = [
('article_image', {'id': 'article-image'})
]
widgetlist = [(DatetimePickerInput, dtp_fields), (SingleImageInput, responsive_img_fields)]
widgets = multiple_widget_generator(widgetlist)
:param klass_field_tuples:
:return: A final dictionary containing all field name to field widget mappings for use in ModelForms
"""
widgets = {}
for klass, fields in klass_field_tuples:
for field, widget in widget_generator(klass, fields).items():
widgets[field] = widget
return widgets
class DatePickerInput(TextInput):
"""
This form widget metaclass mixin activates Bootstrap Datetimepicker
"""
def __init__(self, attrs=None):
super(DatePickerInput, self).__init__(attrs)
self.input_type = 'text'
def render(self, name, value, attrs=None, renderer=None):
"""
Renders this widget
:param name: Name attribute of the input type
:param value: Value, if any
:param attrs: Dictionary of additional attributes and their values
:return: HTML
"""
if value is None:
value = ''
attrs = self.build_attrs(self.attrs, attrs)
final_attrs = self.build_attrs(attrs, {'type': self.input_type, 'name': name})
if value != '':
final_attrs['value'] = format_html('value="{}"', force_text(self.format_value(value)))
else:
final_attrs['value'] = ''
# Kept for backwards compatibility with existing forms.
final_attrs['placeholder'] = 'Vennligst velg en dato ...'
if attrs.get('placeholder', False):
# Update the placeholder text if supplied.
final_attrs['placeholder'] = force_text(attrs.get('placeholder'))
return format_html(
DATEPICKER_WIDGET_STRING,
id=force_text(final_attrs['id']),
name=force_text(final_attrs['name']),
placeholder=force_text(final_attrs['placeholder']),
value=final_attrs['value']
)
class DatetimePickerInput(TextInput):
"""
This form widget metaclass mixin activates Bootstrap Datetimepicker
"""
def __init__(self, attrs=None):
super(DatetimePickerInput, self).__init__(attrs)
self.input_type = 'text'
def render(self, name, value, attrs=None, renderer=None):
"""
Renders this widget
:param name: Name attribute of the input type
:param value: Value, if any
:param attrs: Dictionary of additional attributes and their values
:return: HTML
"""
if value is None:
value = ''
attrs = self.build_attrs(self.attrs, attrs)
final_attrs = self.build_attrs(attrs, {'type': self.input_type, 'name': name})
if value != '':
final_attrs['value'] = force_text(self.format_value(value))
else:
final_attrs['value'] = ''
# Kept for backwards compatibility with existing forms.
final_attrs['placeholder'] = 'Vennligst velg dato og klokkeslett ...'
if self.attrs.get('placeholder', False):
# Update the placeholder text if supplied.
final_attrs['placeholder'] = force_text(self.attrs.get('placeholder'))
return format_html(
DATETIMEPICKER_WIDGET_STRING,
id=force_text(final_attrs['id']),
name=force_text(final_attrs['name']),
placeholder=force_text(final_attrs['placeholder']),
value=final_attrs['value']
)
class TimePickerInput(TextInput):
"""
This form widget metaclass mixin activates Bootstrap Datetimepicker
"""
def __init__(self, attrs=None):
super(TimePickerInput, self).__init__(attrs)
self.input_type = 'text'
def render(self, name, value, attrs=None, renderer=None):
"""
Renders this widget
:param name: Name attribute of the input type
:param value: Value, if any
:param attrs: Dictionary of additional attributes and their values
:return: HTML
"""
if value is None:
value = ''
attrs = self.build_attrs(self.attrs, attrs)
final_attrs = self.build_attrs(attrs, {'type': self.input_type, 'name': name})
if value != '':
final_attrs['value'] = format_html('value="{}"', force_text(self.format_value(value)))
else:
final_attrs['value'] = ''
# Kept for backwards compatibility with existing forms.
final_attrs['placeholder'] = 'Vennligst velg klokkeslett ...'
if attrs.get('placeholder', False):
# Update the placeholder text if supplied.
final_attrs['placeholder'] = force_text(attrs.get('placeholder'))
return format_html(
TIMEPICKER_WIDGET_STRING,
id=force_text(final_attrs['id']),
name=force_text(final_attrs['name']),
placeholder=force_text(final_attrs['placeholder']),
value=final_attrs['value']
)
| 33.666667 | 114 | 0.632532 |
6463129b08c88d18eeccb0468ae0677252a19cd9 | 3,303 | py | Python | boldigger_cline/boldblast_coi.py | DominikBuchner/BOLDigger-commandline | a75fbc4b48b094bd9cecd0e547b04cb948b4b41d | [
"MIT"
] | 5 | 2020-08-25T14:56:40.000Z | 2022-01-22T18:34:42.000Z | boldigger_cline/boldblast_coi.py | DominikBuchner/BOLDigger-commandline | a75fbc4b48b094bd9cecd0e547b04cb948b4b41d | [
"MIT"
] | 4 | 2021-04-08T13:45:26.000Z | 2022-03-17T14:56:18.000Z | boldigger_cline/boldblast_coi.py | DominikBuchner/BOLDigger-commandline | a75fbc4b48b094bd9cecd0e547b04cb948b4b41d | [
"MIT"
] | null | null | null | import requests_html, openpyxl, ntpath, os, datetime, pkg_resources, sys
import numpy as np
import pandas as pd
from pathlib import Path
from bs4 import BeautifulSoup as BSoup
from requests.exceptions import ConnectionError
from requests.exceptions import ReadTimeout
from openpyxl.utils.dataframe import dataframe_to_rows
from boldigger.boldblast_coi import slices, fasta_to_string, post_request, save_as_df, save_results, fasta_rewrite
from boldigger_cline.login import login
from tqdm import tqdm
def requests(url_list):
html = []
with requests_html.HTMLSession() as session:
for url in tqdm(url_list, desc = 'Downloading results'):
r = session.get(url)
html.append(r.text)
return html
def main(username, password, fasta_path, output_path, query_length):
## import data needed for the requests
certs = pkg_resources.resource_filename(__name__, 'data/certs.pem')
## create session for identificaton engine to save userdata
session = login(username, password, certs)
querys, sequences_names = fasta_to_string(fasta_path, query_length)
## flags to check request status
error = False
## request as long as there are querys left
for query in tqdm(querys, desc = 'Requesting BOLD'):
## stop if there are 3 connectionerrors or timeouts
error_count = 0
## request until you get a good answer from the server
while error_count < 3:
tqdm.write('%s: Requesting BOLD. This will take a while.' % datetime.datetime.now().strftime("%H:%M:%S"))
## try to get a answer from bold server, repeat in case of timeout
try:
links = post_request(query, session)
except ConnectionError:
tqdm.write('%s: ConnectionError, BOLD did not respond properly: Trying to reconnect.' % datetime.datetime.now().strftime("%H:%M:%S"))
error_count += 1
continue
except ReadTimeout:
tqdm.write('%s: Readtimeout, BOLD did not respond in time: Trying to reconnect.' % datetime.datetime.now().strftime("%H:%M:%S"))
error_count += 1
continue
break
else:
error = True
if not error:
## download data from the fetched links
html_list = requests(links)
## parse the returned html
tqdm.write('%s: Parsing html.' % datetime.datetime.now().strftime("%H:%M:%S"))
dataframes = save_as_df(html_list, sequences_names[querys.index(query)])
## save results in resultfile
tqdm.write('%s: Saving results.' % datetime.datetime.now().strftime("%H:%M:%S"))
save_results(dataframes, fasta_path, output_path)
## remove found OTUS from fasta and write it into a new one
tqdm.write('%s: Removing finished OTUs from fasta.' % datetime.datetime.now().strftime("%H:%M:%S"))
fasta_rewrite(fasta_path, query_length)
else:
tqdm.write('%s: Too many bad connections. Try a smaller batch size. Close to continue.' % datetime.datetime.now().strftime("%H:%M:%S"))
sys.exit()
if not error:
tqdm.write('%s: Done.' % datetime.datetime.now().strftime("%H:%M:%S"))
| 39.795181 | 149 | 0.644566 |
e8ecfa679235c39e156b670195639846fc3e2dd0 | 45,137 | py | Python | python/ccxt/async_support/hollaex.py | RusEu/ccxt | d6d2b3e2f54a59d102102ee2858eca4d6702fecc | [
"MIT"
] | null | null | null | python/ccxt/async_support/hollaex.py | RusEu/ccxt | d6d2b3e2f54a59d102102ee2858eca4d6702fecc | [
"MIT"
] | null | null | null | python/ccxt/async_support/hollaex.py | RusEu/ccxt | d6d2b3e2f54a59d102102ee2858eca4d6702fecc | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.async_support.base.exchange import Exchange
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import BadRequest
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import NetworkError
from ccxt.base.decimal_to_precision import TICK_SIZE
from ccxt.base.precise import Precise
class hollaex(Exchange):
def describe(self):
return self.deep_extend(super(hollaex, self).describe(), {
'id': 'hollaex',
'name': 'HollaEx',
'countries': ['KR'],
'rateLimit': 333,
'version': 'v2',
'has': {
'CORS': False,
'fetchMarkets': True,
'fetchCurrencies': True,
'fetchTicker': True,
'fetchTickers': True,
'fetchOrderBook': True,
'fetchOrderBooks': True,
'fetchTrades': True,
'fetchOHLCV': True,
'fetchBalance': True,
'createOrder': True,
'createLimitBuyOrder': True,
'createLimitSellOrder': True,
'createMarketBuyOrder': True,
'createMarketSellOrder': True,
'cancelOrder': True,
'cancelAllOrders': True,
'fetchOpenOrders': True,
'fetchClosedOrders': True,
'fetchOpenOrder': True,
'fetchOrder': False,
'fetchDeposits': True,
'fetchWithdrawals': True,
'fetchTransactions': False,
'fetchOrders': True,
'fetchMyTrades': True,
'withdraw': True,
'fetchDepositAddress': True,
},
'timeframes': {
'1h': '1h',
'1d': '1d',
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/75841031-ca375180-5ddd-11ea-8417-b975674c23cb.jpg',
'api': 'https://api.hollaex.com',
'www': 'https://hollaex.com',
'doc': 'https://apidocs.hollaex.com',
'referral': 'https://pro.hollaex.com/signup?affiliation_code=QSWA6G',
},
'precisionMode': TICK_SIZE,
'requiredCredentials': {
'apiKey': True,
'secret': True,
},
'api': {
'public': {
'get': [
'health',
'constants',
'kit',
'tiers',
'ticker',
'tickers',
'orderbook',
'orderbooks',
'trades',
'chart',
'charts',
# TradingView
'udf/config',
'udf/history',
'udf/symbols',
],
},
'private': {
'get': [
'user',
'user/balance',
'user/deposits',
'user/withdrawals',
'user/withdrawal/fee',
'user/trades',
'orders',
'orders/{order_id}',
],
'post': [
'user/request-withdrawal',
'order',
],
'delete': [
'order/all',
'order',
],
},
},
'fees': {
'trading': {
'tierBased': True,
'percentage': True,
'taker': 0.001,
'maker': 0.001,
},
},
'exceptions': {
'broad': {
'Invalid token': AuthenticationError,
'Order not found': OrderNotFound,
'Insufficient balance': InsufficientFunds,
},
'exact': {
'400': BadRequest,
'403': AuthenticationError,
'404': BadRequest,
'405': BadRequest,
'410': BadRequest,
'429': BadRequest,
'500': NetworkError,
'503': NetworkError,
},
},
'options': {
# how many seconds before the authenticated request expires
'api-expires': int(self.timeout / 1000),
},
})
async def fetch_markets(self, params={}):
response = await self.publicGetConstants(params)
#
# {
# coins: {
# xmr: {
# id: 7,
# fullname: "Monero",
# symbol: "xmr",
# active: True,
# allow_deposit: True,
# allow_withdrawal: True,
# withdrawal_fee: 0.02,
# min: 0.001,
# max: 100000,
# increment_unit: 0.001,
# deposit_limits: {'1': 0, '2': 0, '3': 0, '4': 0, '5': 0, '6': 0},
# withdrawal_limits: {'1': 10, '2': 15, '3': 100, '4': 100, '5': 200, '6': 300, '7': 350, '8': 400, '9': 500, '10': -1},
# created_at: "2019-12-09T07:14:02.720Z",
# updated_at: "2020-01-16T12:12:53.162Z"
# },
# # ...
# },
# pairs: {
# 'btc-usdt': {
# id: 2,
# name: "btc-usdt",
# pair_base: "btc",
# pair_2: "usdt",
# taker_fees: {'1': 0.3, '2': 0.25, '3': 0.2, '4': 0.18, '5': 0.1, '6': 0.09, '7': 0.08, '8': 0.06, '9': 0.04, '10': 0},
# maker_fees: {'1': 0.1, '2': 0.08, '3': 0.05, '4': 0.03, '5': 0, '6': 0, '7': 0, '8': 0, '9': 0, '10': 0},
# min_size: 0.0001,
# max_size: 1000,
# min_price: 100,
# max_price: 100000,
# increment_size: 0.0001,
# increment_price: 0.05,
# active: True,
# created_at: "2019-12-09T07:15:54.537Z",
# updated_at: "2019-12-09T07:15:54.537Z"
# },
# },
# config: {tiers: 10},
# status: True
# }
#
pairs = self.safe_value(response, 'pairs', {})
keys = list(pairs.keys())
result = []
for i in range(0, len(keys)):
key = keys[i]
market = pairs[key]
id = self.safe_string(market, 'name')
baseId = self.safe_string(market, 'pair_base')
quoteId = self.safe_string(market, 'pair_2')
base = self.common_currency_code(baseId.upper())
quote = self.common_currency_code(quoteId.upper())
symbol = base + '/' + quote
active = self.safe_value(market, 'active')
maker = self.fees['trading']['maker']
taker = self.fees['trading']['taker']
result.append({
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'baseId': baseId,
'quoteId': quoteId,
'active': active,
'precision': {
'price': self.safe_number(market, 'increment_price'),
'amount': self.safe_number(market, 'increment_size'),
},
'limits': {
'amount': {
'min': self.safe_number(market, 'min_size'),
'max': self.safe_number(market, 'max_size'),
},
'price': {
'min': self.safe_number(market, 'min_price'),
'max': self.safe_number(market, 'max_price'),
},
'cost': {'min': None, 'max': None},
},
'taker': taker,
'maker': maker,
'info': market,
})
return result
async def fetch_currencies(self, params={}):
response = await self.publicGetConstants(params)
coins = self.safe_value(response, 'coins', {})
keys = list(coins.keys())
result = {}
for i in range(0, len(keys)):
key = keys[i]
currency = coins[key]
id = self.safe_string(currency, 'symbol')
numericId = self.safe_integer(currency, 'id')
code = self.safe_currency_code(id)
name = self.safe_string(currency, 'fullname')
active = self.safe_value(currency, 'active')
fee = self.safe_number(currency, 'withdrawal_fee')
precision = self.safe_number(currency, 'increment_unit')
withdrawalLimits = self.safe_value(currency, 'withdrawal_limits', [])
result[code] = {
'id': id,
'numericId': numericId,
'code': code,
'info': currency,
'name': name,
'active': active,
'fee': fee,
'precision': precision,
'limits': {
'amount': {
'min': self.safe_number(currency, 'min'),
'max': self.safe_number(currency, 'max'),
},
'withdraw': {
'min': None,
'max': self.safe_value(withdrawalLimits, 0),
},
},
}
return result
async def fetch_order_books(self, symbols=None, limit=None, params={}):
await self.load_markets()
response = await self.publicGetOrderbooks(params)
result = {}
marketIds = list(response.keys())
for i in range(0, len(marketIds)):
marketId = marketIds[i]
orderbook = response[marketId]
symbol = self.safe_symbol(marketId, None, '-')
timestamp = self.parse8601(self.safe_string(orderbook, 'timestamp'))
result[symbol] = self.parse_order_book(response[marketId], timestamp)
return result
async def fetch_order_book(self, symbol, limit=None, params={}):
await self.load_markets()
marketId = self.market_id(symbol)
request = {
'symbol': marketId,
}
response = await self.publicGetOrderbooks(self.extend(request, params))
#
# {
# "btc-usdt": {
# "bids": [
# [8836.4, 1.022],
# [8800, 0.0668],
# [8797.75, 0.2398],
# ],
# "asks": [
# [8839.35, 1.5334],
# [8852.6, 0.0579],
# [8860.45, 0.1815],
# ],
# "timestamp": "2020-03-03T02:27:25.147Z"
# },
# "eth-usdt": {},
# # ...
# }
#
orderbook = self.safe_value(response, marketId)
timestamp = self.parse8601(self.safe_string(orderbook, 'timestamp'))
return self.parse_order_book(orderbook, timestamp)
async def fetch_ticker(self, symbol, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
response = await self.publicGetTicker(self.extend(request, params))
#
# {
# open: 8615.55,
# close: 8841.05,
# high: 8921.1,
# low: 8607,
# last: 8841.05,
# volume: 20.2802,
# timestamp: '2020-03-03T03:11:18.964Z'
# }
#
return self.parse_ticker(response, market)
async def fetch_tickers(self, symbols=None, params={}):
await self.load_markets()
response = await self.publicGetTickers(self.extend(params))
#
# {
# "bch-usdt": {
# "time": "2020-03-02T04:29:45.011Z",
# "open": 341.65,
# "close":337.9,
# "high":341.65,
# "low":337.3,
# "last":337.9,
# "volume":0.054,
# "symbol":"bch-usdt"
# },
# # ...
# }
#
return self.parse_tickers(response, symbols)
def parse_tickers(self, response, symbols=None):
result = {}
keys = list(response.keys())
for i in range(0, len(keys)):
key = keys[i]
ticker = response[key]
marketId = self.safe_string(ticker, 'symbol', key)
market = self.safe_market(marketId, None, '-')
symbol = market['symbol']
result[symbol] = self.parse_ticker(ticker, market)
return self.filter_by_array(result, 'symbol', symbols)
def parse_ticker(self, ticker, market=None):
#
# fetchTicker
#
# {
# open: 8615.55,
# close: 8841.05,
# high: 8921.1,
# low: 8607,
# last: 8841.05,
# volume: 20.2802,
# timestamp: '2020-03-03T03:11:18.964Z',
# }
#
# fetchTickers
#
# {
# "time": "2020-03-02T04:29:45.011Z",
# "open": 341.65,
# "close": 337.9,
# "high": 341.65,
# "low": 337.3,
# "last": 337.9,
# "volume": 0.054,
# "symbol": "bch-usdt"
# }
#
marketId = self.safe_string(ticker, 'symbol')
symbol = self.safe_symbol(marketId, market, '-')
timestamp = self.parse8601(self.safe_string_2(ticker, 'time', 'timestamp'))
close = self.safe_number(ticker, 'close')
result = {
'symbol': symbol,
'info': ticker,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_number(ticker, 'high'),
'low': self.safe_number(ticker, 'low'),
'bid': None,
'bidVolume': None,
'ask': None,
'askVolume': None,
'vwap': None,
'open': self.safe_number(ticker, 'open'),
'close': close,
'last': self.safe_number(ticker, 'last', close),
'previousClose': None,
'change': None,
'percentage': None,
'average': None,
'baseVolume': self.safe_number(ticker, 'volume'),
'quoteVolume': None,
}
return result
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
response = await self.publicGetTrades(self.extend(request, params))
#
# {
# "btc-usdt": [
# {
# "size": 0.5,
# "price": 8830,
# "side": "buy",
# "timestamp": "2020-03-03T04:44:33.034Z"
# },
# # ...
# ]
# }
#
trades = self.safe_value(response, market['id'], [])
return self.parse_trades(trades, market, since, limit)
def parse_trade(self, trade, market=None):
#
# fetchTrades(public)
#
# {
# "size": 0.5,
# "price": 8830,
# "side": "buy",
# "timestamp": "2020-03-03T04:44:33.034Z"
# }
#
# fetchMyTrades(private)
#
# {
# "side": "buy",
# "symbol": "eth-usdt",
# "size": 0.086,
# "price": 226.19,
# "timestamp": "2020-03-03T08:03:55.459Z",
# "fee": 0.1
# }
#
marketId = self.safe_string(trade, 'symbol')
market = self.safe_market(marketId, market, '-')
symbol = market['symbol']
datetime = self.safe_string(trade, 'timestamp')
timestamp = self.parse8601(datetime)
side = self.safe_string(trade, 'side')
priceString = self.safe_string(trade, 'price')
amountString = self.safe_string(trade, 'size')
price = self.parse_number(priceString)
amount = self.parse_number(amountString)
cost = self.parse_number(Precise.string_mul(priceString, amountString))
feeCost = self.safe_number(trade, 'fee')
fee = None
if feeCost is not None:
quote = market['quote']
feeCurrencyCode = market['quote'] if (market is not None) else quote
fee = {
'cost': feeCost,
'currency': feeCurrencyCode,
}
return {
'info': trade,
'id': None,
'timestamp': timestamp,
'datetime': datetime,
'symbol': symbol,
'order': None,
'type': None,
'side': side,
'takerOrMaker': None,
'price': price,
'amount': amount,
'cost': cost,
'fee': fee,
}
async def fetch_ohlcv(self, symbol, timeframe='1h', since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
'resolution': self.timeframes[timeframe],
}
duration = self.parse_timeframe(timeframe)
if since is None:
if limit is None:
raise ArgumentsRequired(self.id + " fetchOHLCV() requires a 'since' or a 'limit' argument")
else:
end = self.seconds()
start = end - duration * limit
request['to'] = end
request['from'] = start
else:
if limit is None:
request['from'] = int(since / 1000)
request['to'] = self.seconds()
else:
start = int(since / 1000)
request['from'] = start
request['to'] = self.sum(start, duration * limit)
response = await self.publicGetChart(self.extend(request, params))
#
# [
# {
# "time":"2020-03-02T20:00:00.000Z",
# "close":8872.1,
# "high":8872.1,
# "low":8858.6,
# "open":8858.6,
# "symbol":"btc-usdt",
# "volume":1.2922
# },
# ]
#
return self.parse_ohlcvs(response, market, timeframe, since, limit)
def parse_ohlcv(self, response, market=None, timeframe='1h', since=None, limit=None):
#
# {
# "time":"2020-03-02T20:00:00.000Z",
# "close":8872.1,
# "high":8872.1,
# "low":8858.6,
# "open":8858.6,
# "symbol":"btc-usdt",
# "volume":1.2922
# }
#
return [
self.parse8601(self.safe_string(response, 'time')),
self.safe_number(response, 'open'),
self.safe_number(response, 'high'),
self.safe_number(response, 'low'),
self.safe_number(response, 'close'),
self.safe_number(response, 'volume'),
]
async def fetch_balance(self, params={}):
await self.load_markets()
response = await self.privateGetUserBalance(params)
#
# {
# "updated_at": "2020-03-02T22:27:38.428Z",
# "btc_balance": 0,
# "btc_pending": 0,
# "btc_available": 0,
# "eth_balance": 0,
# "eth_pending": 0,
# "eth_available": 0,
# # ...
# }
#
result = {'info': response}
currencyIds = list(self.currencies_by_id.keys())
for i in range(0, len(currencyIds)):
currencyId = currencyIds[i]
code = self.safe_currency_code(currencyId)
account = self.account()
account['free'] = self.safe_string(response, currencyId + '_available')
account['total'] = self.safe_string(response, currencyId + '_balance')
result[code] = account
return self.parse_balance(result, False)
async def fetch_open_order(self, id, symbol=None, params={}):
await self.load_markets()
request = {
'order_id': id,
}
response = await self.privateGetOrdersOrderId(self.extend(request, params))
#
# {
# "id": "string",
# "side": "sell",
# "symbol": "xht-usdt",
# "size": 0.1,
# "filled": 0,
# "stop": null,
# "fee": 0,
# "fee_coin": "usdt",
# "type": "limit",
# "price": 1.09,
# "status": "new",
# "created_by": 116,
# "created_at": "2021-02-17T02:32:38.910Z",
# "updated_at": "2021-02-17T02:32:38.910Z",
# "User": {
# "id": 116,
# "email": "fight@club.com",
# "username": "narrator",
# "exchange_id": 176
# }
# }
#
return self.parse_order(response)
async def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
request = {
'open': True,
}
return await self.fetch_orders(symbol, since, limit, self.extend(request, params))
async def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
request = {
'status': 'filled',
}
return await self.fetch_orders(symbol, since, limit, self.extend(request, params))
async def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
market = None
request = {
# 'symbol': market['id'],
# 'side': 'buy', # 'sell'
# 'status': 'new', # 'filled', 'pfilled', 'canceled'
# 'open': True,
# 'limit': limit, # default 50, max 100
# 'page': 1,
# 'order_by': 'created_at', # id, ...
# 'order': 'asc', # 'desc'
# 'start_date': self.iso8601(since),
# 'end_date': self.iso8601(self.milliseconds()),
}
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
if since is not None:
request['start_date'] = self.iso8601(since)
if limit is not None:
request['limit'] = limit # default 50, max 100
response = await self.privateGetOrders(self.extend(request, params))
#
# {
# "count": 1,
# "data": [
# {
# "id": "string",
# "side": "sell",
# "symbol": "xht-usdt",
# "size": 0.1,
# "filled": 0,
# "stop": null,
# "fee": 0,
# "fee_coin": "usdt",
# "type": "limit",
# "price": 1.09,
# "status": "new",
# "created_by": 116,
# "created_at": "2021-02-17T02:32:38.910Z",
# "updated_at": "2021-02-17T02:32:38.910Z",
# "User": {
# "id": 116,
# "email": "fight@club.com",
# "username": "narrator",
# "exchange_id": 176
# }
# }
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_orders(data, market, since, limit)
def parse_order_status(self, status):
statuses = {
'new': 'open',
'pfilled': 'open',
'filled': 'closed',
'canceled': 'canceled',
}
return self.safe_string(statuses, status, status)
def parse_order(self, order, market=None):
#
# createOrder, fetchOpenOrder, fetchOpenOrders
#
# {
# "id": "string",
# "side": "sell",
# "symbol": "xht-usdt",
# "size": 0.1,
# "filled": 0,
# "stop": null,
# "fee": 0,
# "fee_coin": "usdt",
# "type": "limit",
# "price": 1.09,
# "status": "new",
# "created_by": 116,
# "created_at": "2021-02-17T02:32:38.910Z",
# "updated_at": "2021-02-17T02:32:38.910Z",
# "User": {
# "id": 116,
# "email": "fight@club.com",
# "username": "narrator",
# "exchange_id": 176
# },
# "fee_structure": {
# "maker": 0.2,
# "taker": 0.2
# },
# }
#
marketId = self.safe_string(order, 'symbol')
symbol = self.safe_symbol(marketId, market, '-')
id = self.safe_string(order, 'id')
timestamp = self.parse8601(self.safe_string(order, 'created_at'))
type = self.safe_string(order, 'type')
side = self.safe_string(order, 'side')
price = self.safe_number(order, 'price')
amount = self.safe_number(order, 'size')
filled = self.safe_number(order, 'filled')
status = self.parse_order_status(self.safe_string(order, 'status'))
return self.safe_order({
'id': id,
'clientOrderId': None,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': None,
'status': status,
'symbol': symbol,
'type': type,
'timeInForce': None,
'postOnly': None,
'side': side,
'price': price,
'stopPrice': None,
'amount': amount,
'filled': filled,
'remaining': None,
'cost': None,
'trades': None,
'fee': None,
'info': order,
'average': None,
})
async def create_order(self, symbol, type, side, amount, price=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
'side': side,
'size': amount,
'type': type,
# 'stop': float(self.price_to_precision(symbol, stopPrice)),
# 'meta': {}, # other options such as post_only
}
if type != 'market':
request['price'] = price
stopPrice = self.safe_float_2(params, 'stopPrice', 'stop')
if stopPrice is not None:
request['stop'] = float(self.price_to_precision(symbol, stopPrice))
params = self.omit(params, ['stopPrice', 'stop'])
response = await self.privatePostOrder(self.extend(request, params))
#
# {
# "fee": 0,
# "meta": {},
# "symbol": "xht-usdt",
# "side": "sell",
# "size": 0.1,
# "type": "limit",
# "price": 1,
# "fee_structure": {
# "maker": 0.2,
# "taker": 0.2
# },
# "fee_coin": "usdt",
# "id": "string",
# "created_by": 116,
# "filled": 0,
# "status": "new",
# "updated_at": "2021-02-17T03:03:19.231Z",
# "created_at": "2021-02-17T03:03:19.231Z",
# "stop": null
# }
#
return self.parse_order(response, market)
async def cancel_order(self, id, symbol=None, params={}):
await self.load_markets()
request = {
'order_id': id,
}
response = await self.privateDeleteOrder(self.extend(request, params))
#
# {
# "title": "string",
# "symbol": "xht-usdt",
# "side": "sell",
# "size": 1,
# "type": "limit",
# "price": 0.1,
# "id": "string",
# "created_by": 34,
# "filled": 0
# }
#
return self.parse_order(response)
async def cancel_all_orders(self, symbol=None, params={}):
await self.load_markets()
request = {}
market = None
if symbol is not None:
market = self.markets(symbol)
request['symbol'] = market['id']
response = await self.privateDeleteOrderAll(self.extend(request, params))
#
# [
# {
# "title": "string",
# "symbol": "xht-usdt",
# "side": "sell",
# "size": 1,
# "type": "limit",
# "price": 0.1,
# "id": "string",
# "created_by": 34,
# "filled": 0
# }
# ]
#
return self.parse_orders(response, market)
async def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
request = {
# 'symbol': market['id'],
# 'limit': 50, # default 50, max 100
# 'page': 1, # page of data to retrieve
# 'order_by': 'timestamp', # field to order data
# 'order': 'asc', # asc or desc
# 'start_date': 123, # starting date of queried data
# 'end_date': 321, # ending date of queried data
}
market = None
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
if limit is not None:
request['limit'] = limit # default 50, max 100
if since is not None:
request['start_date'] = self.iso8601(since)
response = await self.privateGetUserTrades(self.extend(request, params))
#
# {
# "count": 1,
# "data": [
# {
# "side": "buy",
# "symbol": "eth-usdt",
# "size": 0.086,
# "price": 226.19,
# "timestamp": "2020-03-03T08:03:55.459Z",
# "fee": 0.1
# }
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_trades(data, market, since, limit)
async def fetch_deposit_address(self, code, params={}):
await self.load_markets()
currency = self.currency(code)
response = await self.privateGetUser(params)
#
# {
# "id": 620,
# "email": "email@gmail.com",
# "full_name": "",
# "name_verified": False,
# "gender": False,
# "nationality": "",
# "phone_number": "",
# "address": {"city": "", "address": "", "country": "", "postal_code": ""},
# "id_data": {"note": "", "type": "", "number": "", "status": 0},
# "bank_account":[],
# "crypto_wallet":{
# "xrp": "rJtoECs6rPkJoAfgtR8SDDshV6hRHe3X7y:391496555"
# "usdt":"0x1fb4248e167901dfa0d8cdda2243a2126d7ce48d"
# # ...
# },
# "verification_level": 1,
# "otp_enabled": True,
# "activated": True,
# "note": "",
# "username": "user",
# "affiliation_code": "QSWA6G",
# "settings": {
# "chat": {"set_username": False},
# "risk": {"order_portfolio_percentage": 20},
# "audio": {
# "public_trade": False,
# "order_completed": True,
# "order_partially_completed": True
# },
# "language": "en",
# "interface": {"theme": "white","order_book_levels": 10},
# "notification": {
# "popup_order_completed": True,
# "popup_order_confirmation": True,
# "popup_order_partially_filled": True
# }
# },
# "flagged": False,
# "is_hap": False,
# "pin": False,
# "discount": 0,
# "created_at": "2020-03-02T22:27:38.331Z",
# "updated_at": "2020-03-03T07:54:58.315Z",
# "balance": {
# "xht_balance": 0,
# "xht_pending": 0,
# "xht_available": 0,
# # ...
# "updated_at": "2020-03-03T10:21:05.430Z"
# },
# "images": [],
# "fees": {
# "btc-usdt": {"maker_fee": 0.1, "taker_fee": 0.3},
# "eth-usdt": {"maker_fee": 0.1, "taker_fee": 0.3},
# # ...
# }
# }
#
cryptoWallet = self.safe_value(response, 'crypto_wallet')
address = self.safe_string(cryptoWallet, currency['id'])
tag = None
if address is not None:
parts = address.split(':')
address = self.safe_string(parts, 0)
tag = self.safe_string(parts, 1)
self.check_address(address)
return {
'currency': code,
'address': address,
'tag': tag,
'info': response,
}
async def fetch_deposits(self, code=None, since=None, limit=None, params={}):
await self.load_markets()
request = {
# 'currency': currency['id'],
# 'limit': 50, # default 50, max 100
# 'page': 1, # page of data to retrieve
# 'order_by': 'timestamp', # field to order data
# 'order': 'asc', # asc or desc
# 'start_date': 123, # starting date of queried data
# 'end_date': 321, # ending date of queried data
}
currency = None
if code is not None:
currency = self.currency(code)
request['currency'] = currency['id']
if limit is not None:
request['limit'] = limit # default 50, max 100
if since is not None:
request['start_date'] = self.iso8601(since)
response = await self.privateGetUserDeposits(self.extend(request, params))
#
# {
# "count": 1,
# "data": [
# {
# "id": 539,
# "amount": 20,
# "fee": 0,
# "address": "0x5c0cc98270d7089408fcbcc8e2131287f5be2306",
# "transaction_id": "0xd4006327a5ec2c41adbdcf566eaaba6597c3d45906abe78ea1a4a022647c2e28",
# "status": True,
# "dismissed": False,
# "rejected": False,
# "description": "",
# "type": "deposit",
# "currency": "usdt",
# "created_at": "2020-03-03T07:56:36.198Z",
# "updated_at": "2020-03-03T08:00:05.674Z",
# "user_id": 620
# }
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_transactions(data, currency, since, limit)
async def fetch_withdrawals(self, code=None, since=None, limit=None, params={}):
await self.load_markets()
request = {
# 'currency': currency['id'],
# 'limit': 50, # default 50, max 100
# 'page': 1, # page of data to retrieve
# 'order_by': 'timestamp', # field to order data
# 'order': 'asc', # asc or desc
# 'start_date': 123, # starting date of queried data
# 'end_date': 321, # ending date of queried data
}
currency = None
if code is not None:
currency = self.currency(code)
request['currency'] = currency['id']
if limit is not None:
request['limit'] = limit # default 50, max 100
if since is not None:
request['start_date'] = self.iso8601(since)
response = await self.privateGetUserWithdrawals(self.extend(request, params))
#
# {
# "count": 1,
# "data": [
# {
# "id": 539,
# "amount": 20,
# "fee": 0,
# "address": "0x5c0cc98270d7089408fcbcc8e2131287f5be2306",
# "transaction_id": "0xd4006327a5ec2c41adbdcf566eaaba6597c3d45906abe78ea1a4a022647c2e28",
# "status": True,
# "dismissed": False,
# "rejected": False,
# "description": "",
# "type": "withdrawal",
# "currency": "usdt",
# "created_at": "2020-03-03T07:56:36.198Z",
# "updated_at": "2020-03-03T08:00:05.674Z",
# "user_id": 620
# }
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_transactions(data, currency, since, limit)
def parse_transaction(self, transaction, currency=None):
#
# {
# "id": 539,
# "amount": 20,
# "fee": 0,
# "address": "0x5c0cc98270d7089408fcbcc8e2131287f5be2306",
# "transaction_id": "0xd4006327a5ec2c41adbdcf566eaaba6597c3d45906abe78ea1a4a022647c2e28",
# "status": True,
# "dismissed": False,
# "rejected": False,
# "description": "",
# "type": "withdrawal",
# "currency": "usdt",
# "created_at": "2020-03-03T07:56:36.198Z",
# "updated_at": "2020-03-03T08:00:05.674Z",
# "user_id": 620
# }
#
id = self.safe_string(transaction, 'id')
txid = self.safe_string(transaction, 'transaction_id')
timestamp = self.parse8601(self.safe_string(transaction, 'created_at'))
updated = self.parse8601(self.safe_string(transaction, 'updated_at'))
type = self.safe_string(transaction, 'type')
amount = self.safe_number(transaction, 'amount')
address = self.safe_string(transaction, 'address')
addressTo = None
addressFrom = None
tag = None
tagTo = None
tagFrom = None
if address is not None:
parts = address.split(':')
address = self.safe_string(parts, 0)
tag = self.safe_string(parts, 1)
addressTo = address
tagTo = tag
currencyId = self.safe_string(transaction, 'currency')
code = self.safe_currency_code(currencyId)
status = self.safe_value(transaction, 'status')
dismissed = self.safe_value(transaction, 'dismissed')
rejected = self.safe_value(transaction, 'rejected')
if status:
status = 'ok'
elif dismissed:
status = 'canceled'
elif rejected:
status = 'failed'
else:
status = 'pending'
fee = {
'currency': code,
'cost': self.safe_number(transaction, 'fee'),
}
return {
'info': transaction,
'id': id,
'txid': txid,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'addressFrom': addressFrom,
'address': address,
'addressTo': addressTo,
'tagFrom': tagFrom,
'tag': tag,
'tagTo': tagTo,
'type': type,
'amount': amount,
'currency': code,
'status': status,
'updated': updated,
'fee': fee,
}
async def withdraw(self, code, amount, address, tag=None, params={}):
self.check_address(address)
await self.load_markets()
currency = self.currency(code)
if tag is not None:
address += ':' + tag
request = {
'currency': currency['id'],
'amount': amount,
'address': address,
}
# one time password
otp = self.safe_string(params, 'otp_code')
if (otp is not None) or (self.twofa is not None):
if otp is None:
otp = self.oath()
request['otp_code'] = otp
response = await self.privatePostUserRequestWithdrawal(self.extend(request, params))
return {
'info': response,
'id': None,
}
    def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
        """Build the url, headers and body for one request.

        Private endpoints are authenticated with an HMAC signature over
        method + path + expiry (+ the JSON body for POST requests), sent in
        the 'api-signature' header next to 'api-key' and 'api-expires'.
        """
        query = self.omit(params, self.extract_params(path))
        path = '/' + self.version + '/' + self.implode_params(path, params)
        if (method == 'GET') or (method == 'DELETE'):
            if query:
                # GET/DELETE parameters travel in the query string, which is
                # part of the signed path
                path += '?' + self.urlencode(query)
        url = self.urls['api'] + path
        if api == 'private':
            self.check_required_credentials()
            # signature validity deadline in epoch seconds; configurable via
            # options['api-expires'] / options['expires'], default = timeout
            defaultExpires = self.safe_integer_2(self.options, 'api-expires', 'expires', int(self.timeout / 1000))
            expires = self.sum(self.seconds(), defaultExpires)
            expiresString = str(expires)
            auth = method + path + expiresString
            headers = {
                'api-key': self.encode(self.apiKey),
                'api-expires': expiresString,
            }
            if method == 'POST':
                headers['Content-type'] = 'application/json'
                if query:
                    # POST parameters are JSON-encoded into the body and
                    # appended to the signed payload
                    body = self.json(query)
                    auth += body
            signature = self.hmac(self.encode(auth), self.encode(self.secret))
            headers['api-signature'] = signature
        return {'url': url, 'method': method, 'body': body, 'headers': headers}
def handle_errors(self, code, reason, url, method, headers, body, response, requestHeaders, requestBody):
if response is None:
return
if (code >= 400) and (code <= 503):
#
# {"message": "Invalid token"}
#
feedback = self.id + ' ' + body
message = self.safe_string(response, 'message')
self.throw_broadly_matched_exception(self.exceptions['broad'], message, feedback)
status = str(code)
self.throw_exactly_matched_exception(self.exceptions['exact'], status, feedback)
| 37.739967 | 144 | 0.43286 |
62bfb75239157984cbb1a411424a90c3f1f5a249 | 758 | py | Python | 11_challenge/11_challenge.py | QQ-88/wtfiswronghere | 5497a4354e6f02b32ff4b4afba23e06054d2f308 | [
"MIT"
] | null | null | null | 11_challenge/11_challenge.py | QQ-88/wtfiswronghere | 5497a4354e6f02b32ff4b4afba23e06054d2f308 | [
"MIT"
] | null | null | null | 11_challenge/11_challenge.py | QQ-88/wtfiswronghere | 5497a4354e6f02b32ff4b4afba23e06054d2f308 | [
"MIT"
] | null | null | null | """
We will use this script to teach Python to absolute beginners
The script is an example of Fizz-Buzz implemented in Python
The FizzBuzz problem:
For all integers between 1 and 99 (both inclusive):
# print fizz for multiples of 3
# print buzz for multiples of 5
# print fizzbuzz for multiples of 3 and 5"
"""
def fizzbuzz(max_num):
    """Print FizzBuzz lines for the integers 1 (inclusive) to max_num (exclusive).

    Per the spec in the module docstring: multiples of both 3 and 5 print
    'fizzbuzz', multiples of 3 print 'fizz', multiples of 5 print 'buzz';
    other numbers print nothing.
    """
    for i in range(1, max_num):
        # % (modulo) gives the remainder; a zero remainder means divisibility
        if i % 3 == 0 and i % 5 == 0:
            print(i, "fizzbuzz")
        elif i % 3 == 0:
            print(i, "fizz")
        elif i % 5 == 0:
            # bugfix: was 'Buzz' — inconsistent with the spec and with the
            # lowercase 'fizz'/'fizzbuzz' branches
            print(i, "buzz")
# ---- script entry point ----
if __name__ == '__main__':
    fizzbuzz(100)  # the spec covers 1..99, both inclusive
| 28.074074 | 61 | 0.626649 |
e18ec69effd6be4b6ef7cac023f7b1ae77a2414e | 1,776 | py | Python | fsl/wrappers/fast.py | physimals/fslpy | 10dd3f996c79d402c65cf0af724b8b00082d5176 | [
"Apache-2.0"
] | null | null | null | fsl/wrappers/fast.py | physimals/fslpy | 10dd3f996c79d402c65cf0af724b8b00082d5176 | [
"Apache-2.0"
] | null | null | null | fsl/wrappers/fast.py | physimals/fslpy | 10dd3f996c79d402c65cf0af724b8b00082d5176 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
#
# fast.py - Wrapper for the FSL fast command.
#
# Author: Martin Craig <martin.craig@eng.ox.ac.uk>
# Paul McCarthy <pauldmccarthy@gmail.com>
#
"""This module provides the :func:`fast` function, a wrapper for the FSL
`FAST <https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FAST>`_ command.
"""
import six
import fsl.utils.assertions as asrt
from . import wrapperutils as wutils
@wutils.fileOrImage('imgs', 'A', 's', 'manualseg', outprefix='out')
@wutils.fileOrArray('a')
@wutils.fslwrapper
def fast(imgs, out='fast', **kwargs):
    """Wrapper for the ``fast`` command.

    :arg imgs:      Input image(s)
    :arg out:       Output basename
    :arg n_classes: Number of tissue classes (corresponds to the ``--class``
                    command line option)
    """
    if isinstance(imgs, six.string_types):
        imgs = [imgs]
    asrt.assertIsNifti(*imgs)
    # boolean switches: each is emitted only when its value is True
    booleanFlags = ['nobias', 'N', 'verbose', 'v', 'Prior', 'P',
                    'segments', 'nopve', 'g', 'b', 'B', 'p']
    valmap = {flag: wutils.SHOW_IF_TRUE for flag in booleanFlags}
    # 'class' is a python keyword, so the wrapper exposes it as 'n_classes'
    argmap = {'n_classes': 'class'}
    cmd = ['fast', '-v', '--out=%s' % out]
    cmd += wutils.applyArgStyle('--=',
                                valmap=valmap,
                                argmap=argmap,
                                singlechar_args=True,
                                **kwargs)
    cmd += imgs
    return cmd
| 27.75 | 76 | 0.54223 |
bc9703f00ce691ed15962a0358b160b0c5a00904 | 2,379 | py | Python | ui.py | smh0505/minesweeper | bbcf13df44743cdf3e8153e08da0b9ff27a502ca | [
"MIT"
] | null | null | null | ui.py | smh0505/minesweeper | bbcf13df44743cdf3e8153e08da0b9ff27a502ca | [
"MIT"
] | null | null | null | ui.py | smh0505/minesweeper | bbcf13df44743cdf3e8153e08da0b9ff27a502ca | [
"MIT"
] | null | null | null | import pygame as pg
import cell
class UI:
    """A vertical text menu rendered onto a solid-color background surface."""

    def __init__(self, size, color_b):
        # background surface for the whole menu screen
        self.background = pg.Surface(size).convert()
        self.background.fill(color_b)
        self.menu = []       # menu entry labels, top to bottom
        self.selected = 0    # index of the currently highlighted entry

    def selectUp(self):
        """Move the selection up one entry, wrapping to the bottom."""
        self.selected = (self.selected - 1) % len(self.menu)

    def selectDown(self):
        """Move the selection down one entry, wrapping to the top."""
        self.selected = (self.selected + 1) % len(self.menu)

    def printSelected(self, f_name=None, f_size=36, f_color1=(0, 0, 0), f_color2=(255, 255, 255)):
        """Render every menu entry; the selected entry uses f_color2.

        Deduplicated: the two branches of the previous version differed
        only in the color passed to font.render().
        """
        font = pg.font.Font(f_name, f_size)
        for i, label in enumerate(self.menu):
            color = f_color2 if i == self.selected else f_color1
            text = font.render(label, 0, color)
            self.background.blit(text, (20, 20 + (i * 56)))

    def resetSelected(self):
        """Return the selection to the first entry.

        Equivalent to the old loop that called selectDown() until it
        wrapped back to 0, but in constant time.
        """
        self.selected = 0
def titleScreen(size, color_b):
    """Build the title-screen menu."""
    screen = UI(size, color_b)
    screen.menu += ["New Game", "Options", "Records", "Quit"]
    return screen
def levelScreen(size, color_b):
    """Build the difficulty-selection menu."""
    screen = UI(size, color_b)
    screen.menu.extend(["Beginner", "Intermediate", "Expert", "Back"])
    return screen
def optionScreen(size, color_b):
    """Build the options menu."""
    screen = UI(size, color_b)
    for entry in ("Theme", "Volume", "Back"):
        screen.menu.append(entry)
    return screen
def pauseScreen(size, color_b):
    """Build the in-game pause menu."""
    screen = UI(size, color_b)
    screen.menu += ["Resume", "New Game", "Options", "Records", "Quit"]
    return screen
class GameBoard:
    """Playfield surface: a background with a centered grid frame."""

    def __init__(self, size, color_b):
        self.background = pg.Surface(size).convert()
        self.resetBoard(color_b)

    def resetBoard(self, color_b):
        """Flood the board with the given background color."""
        self.background.fill(color_b)

    def drawBoard(self, grid, color):
        """Draw the grid frame (32px per cell) centered on the background."""
        frame_w = 32 * len(grid[0])
        frame_h = 32 * len(grid)
        frame = pg.Rect(0, 0, frame_w, frame_h)
        cx = self.background.get_width() / 2
        cy = self.background.get_height() / 2
        frame.center = (cx, cy)
        pg.draw.rect(self.background, color, frame)
42e7c7dcc59babd85bdf36b23f21c4cd20d36b4e | 914 | py | Python | lib/python/treadmill/cli/scheduler/servers.py | drienyov/treadmill | ce21537cd9a2fdb0567ac2aa3de1afcb2f6861de | [
"Apache-2.0"
] | 2 | 2017-10-31T18:48:20.000Z | 2018-03-04T20:35:20.000Z | lib/python/treadmill/cli/scheduler/servers.py | bretttegart/treadmill | 812109e31c503a6eddaee2d3f2e1faf2833b6aaf | [
"Apache-2.0"
] | null | null | null | lib/python/treadmill/cli/scheduler/servers.py | bretttegart/treadmill | 812109e31c503a6eddaee2d3f2e1faf2833b6aaf | [
"Apache-2.0"
] | null | null | null | """Show servers report."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import click
import pandas as pd
from treadmill import cli
from treadmill.cli.scheduler import fetch_report, print_report
from treadmill import restclient
def init():
    """Return top level command handler."""
    @click.command()
    @cli.handle_exceptions(restclient.CLI_REST_EXCEPTIONS)
    @click.option('--match', help='Server name pattern match')
    @click.option('--partition', help='Partition name pattern match')
    @click.pass_context
    def servers(ctx, match, partition):
        """View servers report."""
        # ctx.obj is populated by the parent scheduler command group;
        # presumably its 'api' entry holds the REST endpoint(s) used by
        # fetch_report — confirm against the group's setup.
        report = fetch_report(ctx.obj.get('api'), 'servers', match, partition)
        # 'valid_until' arrives as epoch seconds; convert for readable output
        report['valid_until'] = pd.to_datetime(report['valid_until'], unit='s')
        print_report(report)
    return servers
| 28.5625 | 79 | 0.727571 |
a6382fcaf230b75840b23256ea8d951fdeb63ec1 | 169 | py | Python | thugmeme/wsgi.py | sameerkumar18/thugmeme | 47cb47454693cce2ef6532d71e9076958991d426 | [
"MIT"
] | 1 | 2020-02-21T08:14:26.000Z | 2020-02-21T08:14:26.000Z | thugmeme/wsgi.py | sameerkumar18/thugmeme | 47cb47454693cce2ef6532d71e9076958991d426 | [
"MIT"
] | null | null | null | thugmeme/wsgi.py | sameerkumar18/thugmeme | 47cb47454693cce2ef6532d71e9076958991d426 | [
"MIT"
] | null | null | null | import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings module; setdefault() lets an
# externally-provided DJANGO_SETTINGS_MODULE take precedence.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "thugmeme.settings")
# Module-level WSGI callable that application servers (gunicorn, uwsgi,
# mod_wsgi) look up as `thugmeme.wsgi:application`.
application = get_wsgi_application()
| 21.125 | 68 | 0.828402 |
35ec2d0e6687da7f15fef5015af180ca853fa496 | 35,450 | py | Python | sdk/python/pulumi_aws_native/fsx/outputs.py | AaronFriel/pulumi-aws-native | 5621690373ac44accdbd20b11bae3be1baf022d1 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_aws_native/fsx/outputs.py | AaronFriel/pulumi-aws-native | 5621690373ac44accdbd20b11bae3be1baf022d1 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_aws_native/fsx/outputs.py | AaronFriel/pulumi-aws-native | 5621690373ac44accdbd20b11bae3be1baf022d1 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'FileSystemAuditLogConfiguration',
'FileSystemClientConfigurations',
'FileSystemDiskIopsConfiguration',
'FileSystemLustreConfiguration',
'FileSystemNfsExports',
'FileSystemOntapConfiguration',
'FileSystemOpenZFSConfiguration',
'FileSystemRootVolumeConfiguration',
'FileSystemSelfManagedActiveDirectoryConfiguration',
'FileSystemTag',
'FileSystemUserAndGroupQuotas',
'FileSystemWindowsConfiguration',
]
@pulumi.output_type
class FileSystemAuditLogConfiguration(dict):
    """Audit log settings for an FSx file system (auto-generated output type)."""
    @staticmethod
    def __key_warning(key: str):
        # map camelCase wire keys to their snake_case property getters and
        # warn callers that use raw dict access
        suggest = None
        if key == "fileAccessAuditLogLevel":
            suggest = "file_access_audit_log_level"
        elif key == "fileShareAccessAuditLogLevel":
            suggest = "file_share_access_audit_log_level"
        elif key == "auditLogDestination":
            suggest = "audit_log_destination"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in FileSystemAuditLogConfiguration. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        # intercept dict subscript access to emit the key warning above
        FileSystemAuditLogConfiguration.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        # intercept dict.get() access to emit the key warning above
        FileSystemAuditLogConfiguration.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 file_access_audit_log_level: str,
                 file_share_access_audit_log_level: str,
                 audit_log_destination: Optional[str] = None):
        # optional fields are stored only when present, mirroring the wire format
        pulumi.set(__self__, "file_access_audit_log_level", file_access_audit_log_level)
        pulumi.set(__self__, "file_share_access_audit_log_level", file_share_access_audit_log_level)
        if audit_log_destination is not None:
            pulumi.set(__self__, "audit_log_destination", audit_log_destination)
    @property
    @pulumi.getter(name="fileAccessAuditLogLevel")
    def file_access_audit_log_level(self) -> str:
        return pulumi.get(self, "file_access_audit_log_level")
    @property
    @pulumi.getter(name="fileShareAccessAuditLogLevel")
    def file_share_access_audit_log_level(self) -> str:
        return pulumi.get(self, "file_share_access_audit_log_level")
    @property
    @pulumi.getter(name="auditLogDestination")
    def audit_log_destination(self) -> Optional[str]:
        return pulumi.get(self, "audit_log_destination")
@pulumi.output_type
class FileSystemClientConfigurations(dict):
    """NFS export client configuration (auto-generated output type).

    NOTE(review): 'clients' appears to be a client specification string
    and 'options' a list of NFS export options — confirm against the AWS
    FSx OpenZFS NfsExports schema.
    """
    def __init__(__self__, *,
                 clients: Optional[str] = None,
                 options: Optional[Sequence[str]] = None):
        # optional fields are stored only when present, mirroring the wire format
        if clients is not None:
            pulumi.set(__self__, "clients", clients)
        if options is not None:
            pulumi.set(__self__, "options", options)
    @property
    @pulumi.getter
    def clients(self) -> Optional[str]:
        return pulumi.get(self, "clients")
    @property
    @pulumi.getter
    def options(self) -> Optional[Sequence[str]]:
        return pulumi.get(self, "options")
@pulumi.output_type
class FileSystemDiskIopsConfiguration(dict):
    """Disk IOPS configuration for a file system (auto-generated output type)."""
    def __init__(__self__, *,
                 iops: Optional[int] = None,
                 mode: Optional[str] = None):
        # optional fields are stored only when present, mirroring the wire format
        if iops is not None:
            pulumi.set(__self__, "iops", iops)
        if mode is not None:
            pulumi.set(__self__, "mode", mode)
    @property
    @pulumi.getter
    def iops(self) -> Optional[int]:
        return pulumi.get(self, "iops")
    @property
    @pulumi.getter
    def mode(self) -> Optional[str]:
        return pulumi.get(self, "mode")
@pulumi.output_type
class FileSystemLustreConfiguration(dict):
    """Lustre-specific configuration for an FSx file system (auto-generated output type)."""
    @staticmethod
    def __key_warning(key: str):
        # map camelCase wire keys to their snake_case property getters and
        # warn callers that use raw dict access
        suggest = None
        if key == "autoImportPolicy":
            suggest = "auto_import_policy"
        elif key == "automaticBackupRetentionDays":
            suggest = "automatic_backup_retention_days"
        elif key == "copyTagsToBackups":
            suggest = "copy_tags_to_backups"
        elif key == "dailyAutomaticBackupStartTime":
            suggest = "daily_automatic_backup_start_time"
        elif key == "dataCompressionType":
            suggest = "data_compression_type"
        elif key == "deploymentType":
            suggest = "deployment_type"
        elif key == "driveCacheType":
            suggest = "drive_cache_type"
        elif key == "exportPath":
            suggest = "export_path"
        elif key == "importPath":
            suggest = "import_path"
        elif key == "importedFileChunkSize":
            suggest = "imported_file_chunk_size"
        elif key == "perUnitStorageThroughput":
            suggest = "per_unit_storage_throughput"
        elif key == "weeklyMaintenanceStartTime":
            suggest = "weekly_maintenance_start_time"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in FileSystemLustreConfiguration. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        # intercept dict subscript access to emit the key warning above
        FileSystemLustreConfiguration.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        # intercept dict.get() access to emit the key warning above
        FileSystemLustreConfiguration.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 auto_import_policy: Optional[str] = None,
                 automatic_backup_retention_days: Optional[int] = None,
                 copy_tags_to_backups: Optional[bool] = None,
                 daily_automatic_backup_start_time: Optional[str] = None,
                 data_compression_type: Optional[str] = None,
                 deployment_type: Optional[str] = None,
                 drive_cache_type: Optional[str] = None,
                 export_path: Optional[str] = None,
                 import_path: Optional[str] = None,
                 imported_file_chunk_size: Optional[int] = None,
                 per_unit_storage_throughput: Optional[int] = None,
                 weekly_maintenance_start_time: Optional[str] = None):
        # every field is optional; only present values are stored, mirroring the wire format
        if auto_import_policy is not None:
            pulumi.set(__self__, "auto_import_policy", auto_import_policy)
        if automatic_backup_retention_days is not None:
            pulumi.set(__self__, "automatic_backup_retention_days", automatic_backup_retention_days)
        if copy_tags_to_backups is not None:
            pulumi.set(__self__, "copy_tags_to_backups", copy_tags_to_backups)
        if daily_automatic_backup_start_time is not None:
            pulumi.set(__self__, "daily_automatic_backup_start_time", daily_automatic_backup_start_time)
        if data_compression_type is not None:
            pulumi.set(__self__, "data_compression_type", data_compression_type)
        if deployment_type is not None:
            pulumi.set(__self__, "deployment_type", deployment_type)
        if drive_cache_type is not None:
            pulumi.set(__self__, "drive_cache_type", drive_cache_type)
        if export_path is not None:
            pulumi.set(__self__, "export_path", export_path)
        if import_path is not None:
            pulumi.set(__self__, "import_path", import_path)
        if imported_file_chunk_size is not None:
            pulumi.set(__self__, "imported_file_chunk_size", imported_file_chunk_size)
        if per_unit_storage_throughput is not None:
            pulumi.set(__self__, "per_unit_storage_throughput", per_unit_storage_throughput)
        if weekly_maintenance_start_time is not None:
            pulumi.set(__self__, "weekly_maintenance_start_time", weekly_maintenance_start_time)
    @property
    @pulumi.getter(name="autoImportPolicy")
    def auto_import_policy(self) -> Optional[str]:
        return pulumi.get(self, "auto_import_policy")
    @property
    @pulumi.getter(name="automaticBackupRetentionDays")
    def automatic_backup_retention_days(self) -> Optional[int]:
        return pulumi.get(self, "automatic_backup_retention_days")
    @property
    @pulumi.getter(name="copyTagsToBackups")
    def copy_tags_to_backups(self) -> Optional[bool]:
        return pulumi.get(self, "copy_tags_to_backups")
    @property
    @pulumi.getter(name="dailyAutomaticBackupStartTime")
    def daily_automatic_backup_start_time(self) -> Optional[str]:
        return pulumi.get(self, "daily_automatic_backup_start_time")
    @property
    @pulumi.getter(name="dataCompressionType")
    def data_compression_type(self) -> Optional[str]:
        return pulumi.get(self, "data_compression_type")
    @property
    @pulumi.getter(name="deploymentType")
    def deployment_type(self) -> Optional[str]:
        return pulumi.get(self, "deployment_type")
    @property
    @pulumi.getter(name="driveCacheType")
    def drive_cache_type(self) -> Optional[str]:
        return pulumi.get(self, "drive_cache_type")
    @property
    @pulumi.getter(name="exportPath")
    def export_path(self) -> Optional[str]:
        return pulumi.get(self, "export_path")
    @property
    @pulumi.getter(name="importPath")
    def import_path(self) -> Optional[str]:
        return pulumi.get(self, "import_path")
    @property
    @pulumi.getter(name="importedFileChunkSize")
    def imported_file_chunk_size(self) -> Optional[int]:
        return pulumi.get(self, "imported_file_chunk_size")
    @property
    @pulumi.getter(name="perUnitStorageThroughput")
    def per_unit_storage_throughput(self) -> Optional[int]:
        return pulumi.get(self, "per_unit_storage_throughput")
    @property
    @pulumi.getter(name="weeklyMaintenanceStartTime")
    def weekly_maintenance_start_time(self) -> Optional[str]:
        return pulumi.get(self, "weekly_maintenance_start_time")
@pulumi.output_type
class FileSystemNfsExports(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "clientConfigurations":
suggest = "client_configurations"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in FileSystemNfsExports. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
FileSystemNfsExports.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
FileSystemNfsExports.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
client_configurations: Optional[Sequence['outputs.FileSystemClientConfigurations']] = None):
if client_configurations is not None:
pulumi.set(__self__, "client_configurations", client_configurations)
@property
@pulumi.getter(name="clientConfigurations")
def client_configurations(self) -> Optional[Sequence['outputs.FileSystemClientConfigurations']]:
return pulumi.get(self, "client_configurations")
@pulumi.output_type
class FileSystemOntapConfiguration(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "deploymentType":
suggest = "deployment_type"
elif key == "automaticBackupRetentionDays":
suggest = "automatic_backup_retention_days"
elif key == "dailyAutomaticBackupStartTime":
suggest = "daily_automatic_backup_start_time"
elif key == "diskIopsConfiguration":
suggest = "disk_iops_configuration"
elif key == "endpointIpAddressRange":
suggest = "endpoint_ip_address_range"
elif key == "fsxAdminPassword":
suggest = "fsx_admin_password"
elif key == "preferredSubnetId":
suggest = "preferred_subnet_id"
elif key == "routeTableIds":
suggest = "route_table_ids"
elif key == "throughputCapacity":
suggest = "throughput_capacity"
elif key == "weeklyMaintenanceStartTime":
suggest = "weekly_maintenance_start_time"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in FileSystemOntapConfiguration. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
FileSystemOntapConfiguration.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
FileSystemOntapConfiguration.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
deployment_type: str,
automatic_backup_retention_days: Optional[int] = None,
daily_automatic_backup_start_time: Optional[str] = None,
disk_iops_configuration: Optional['outputs.FileSystemDiskIopsConfiguration'] = None,
endpoint_ip_address_range: Optional[str] = None,
fsx_admin_password: Optional[str] = None,
preferred_subnet_id: Optional[str] = None,
route_table_ids: Optional[Sequence[str]] = None,
throughput_capacity: Optional[int] = None,
weekly_maintenance_start_time: Optional[str] = None):
pulumi.set(__self__, "deployment_type", deployment_type)
if automatic_backup_retention_days is not None:
pulumi.set(__self__, "automatic_backup_retention_days", automatic_backup_retention_days)
if daily_automatic_backup_start_time is not None:
pulumi.set(__self__, "daily_automatic_backup_start_time", daily_automatic_backup_start_time)
if disk_iops_configuration is not None:
pulumi.set(__self__, "disk_iops_configuration", disk_iops_configuration)
if endpoint_ip_address_range is not None:
pulumi.set(__self__, "endpoint_ip_address_range", endpoint_ip_address_range)
if fsx_admin_password is not None:
pulumi.set(__self__, "fsx_admin_password", fsx_admin_password)
if preferred_subnet_id is not None:
pulumi.set(__self__, "preferred_subnet_id", preferred_subnet_id)
if route_table_ids is not None:
pulumi.set(__self__, "route_table_ids", route_table_ids)
if throughput_capacity is not None:
pulumi.set(__self__, "throughput_capacity", throughput_capacity)
if weekly_maintenance_start_time is not None:
pulumi.set(__self__, "weekly_maintenance_start_time", weekly_maintenance_start_time)
@property
@pulumi.getter(name="deploymentType")
def deployment_type(self) -> str:
return pulumi.get(self, "deployment_type")
@property
@pulumi.getter(name="automaticBackupRetentionDays")
def automatic_backup_retention_days(self) -> Optional[int]:
return pulumi.get(self, "automatic_backup_retention_days")
@property
@pulumi.getter(name="dailyAutomaticBackupStartTime")
def daily_automatic_backup_start_time(self) -> Optional[str]:
return pulumi.get(self, "daily_automatic_backup_start_time")
@property
@pulumi.getter(name="diskIopsConfiguration")
def disk_iops_configuration(self) -> Optional['outputs.FileSystemDiskIopsConfiguration']:
return pulumi.get(self, "disk_iops_configuration")
@property
@pulumi.getter(name="endpointIpAddressRange")
def endpoint_ip_address_range(self) -> Optional[str]:
return pulumi.get(self, "endpoint_ip_address_range")
@property
@pulumi.getter(name="fsxAdminPassword")
def fsx_admin_password(self) -> Optional[str]:
return pulumi.get(self, "fsx_admin_password")
@property
@pulumi.getter(name="preferredSubnetId")
def preferred_subnet_id(self) -> Optional[str]:
return pulumi.get(self, "preferred_subnet_id")
@property
@pulumi.getter(name="routeTableIds")
def route_table_ids(self) -> Optional[Sequence[str]]:
return pulumi.get(self, "route_table_ids")
@property
@pulumi.getter(name="throughputCapacity")
def throughput_capacity(self) -> Optional[int]:
return pulumi.get(self, "throughput_capacity")
@property
@pulumi.getter(name="weeklyMaintenanceStartTime")
def weekly_maintenance_start_time(self) -> Optional[str]:
return pulumi.get(self, "weekly_maintenance_start_time")
@pulumi.output_type
class FileSystemOpenZFSConfiguration(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "deploymentType":
suggest = "deployment_type"
elif key == "automaticBackupRetentionDays":
suggest = "automatic_backup_retention_days"
elif key == "copyTagsToBackups":
suggest = "copy_tags_to_backups"
elif key == "copyTagsToVolumes":
suggest = "copy_tags_to_volumes"
elif key == "dailyAutomaticBackupStartTime":
suggest = "daily_automatic_backup_start_time"
elif key == "diskIopsConfiguration":
suggest = "disk_iops_configuration"
elif key == "rootVolumeConfiguration":
suggest = "root_volume_configuration"
elif key == "throughputCapacity":
suggest = "throughput_capacity"
elif key == "weeklyMaintenanceStartTime":
suggest = "weekly_maintenance_start_time"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in FileSystemOpenZFSConfiguration. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
FileSystemOpenZFSConfiguration.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
FileSystemOpenZFSConfiguration.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
deployment_type: str,
automatic_backup_retention_days: Optional[int] = None,
copy_tags_to_backups: Optional[bool] = None,
copy_tags_to_volumes: Optional[bool] = None,
daily_automatic_backup_start_time: Optional[str] = None,
disk_iops_configuration: Optional['outputs.FileSystemDiskIopsConfiguration'] = None,
root_volume_configuration: Optional['outputs.FileSystemRootVolumeConfiguration'] = None,
throughput_capacity: Optional[int] = None,
weekly_maintenance_start_time: Optional[str] = None):
pulumi.set(__self__, "deployment_type", deployment_type)
if automatic_backup_retention_days is not None:
pulumi.set(__self__, "automatic_backup_retention_days", automatic_backup_retention_days)
if copy_tags_to_backups is not None:
pulumi.set(__self__, "copy_tags_to_backups", copy_tags_to_backups)
if copy_tags_to_volumes is not None:
pulumi.set(__self__, "copy_tags_to_volumes", copy_tags_to_volumes)
if daily_automatic_backup_start_time is not None:
pulumi.set(__self__, "daily_automatic_backup_start_time", daily_automatic_backup_start_time)
if disk_iops_configuration is not None:
pulumi.set(__self__, "disk_iops_configuration", disk_iops_configuration)
if root_volume_configuration is not None:
pulumi.set(__self__, "root_volume_configuration", root_volume_configuration)
if throughput_capacity is not None:
pulumi.set(__self__, "throughput_capacity", throughput_capacity)
if weekly_maintenance_start_time is not None:
pulumi.set(__self__, "weekly_maintenance_start_time", weekly_maintenance_start_time)
@property
@pulumi.getter(name="deploymentType")
def deployment_type(self) -> str:
return pulumi.get(self, "deployment_type")
@property
@pulumi.getter(name="automaticBackupRetentionDays")
def automatic_backup_retention_days(self) -> Optional[int]:
return pulumi.get(self, "automatic_backup_retention_days")
@property
@pulumi.getter(name="copyTagsToBackups")
def copy_tags_to_backups(self) -> Optional[bool]:
return pulumi.get(self, "copy_tags_to_backups")
@property
@pulumi.getter(name="copyTagsToVolumes")
def copy_tags_to_volumes(self) -> Optional[bool]:
return pulumi.get(self, "copy_tags_to_volumes")
@property
@pulumi.getter(name="dailyAutomaticBackupStartTime")
def daily_automatic_backup_start_time(self) -> Optional[str]:
return pulumi.get(self, "daily_automatic_backup_start_time")
@property
@pulumi.getter(name="diskIopsConfiguration")
def disk_iops_configuration(self) -> Optional['outputs.FileSystemDiskIopsConfiguration']:
return pulumi.get(self, "disk_iops_configuration")
@property
@pulumi.getter(name="rootVolumeConfiguration")
def root_volume_configuration(self) -> Optional['outputs.FileSystemRootVolumeConfiguration']:
return pulumi.get(self, "root_volume_configuration")
@property
@pulumi.getter(name="throughputCapacity")
def throughput_capacity(self) -> Optional[int]:
return pulumi.get(self, "throughput_capacity")
@property
@pulumi.getter(name="weeklyMaintenanceStartTime")
def weekly_maintenance_start_time(self) -> Optional[str]:
return pulumi.get(self, "weekly_maintenance_start_time")
@pulumi.output_type
class FileSystemRootVolumeConfiguration(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "copyTagsToSnapshots":
suggest = "copy_tags_to_snapshots"
elif key == "dataCompressionType":
suggest = "data_compression_type"
elif key == "nfsExports":
suggest = "nfs_exports"
elif key == "readOnly":
suggest = "read_only"
elif key == "userAndGroupQuotas":
suggest = "user_and_group_quotas"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in FileSystemRootVolumeConfiguration. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
FileSystemRootVolumeConfiguration.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
FileSystemRootVolumeConfiguration.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
copy_tags_to_snapshots: Optional[bool] = None,
data_compression_type: Optional[str] = None,
nfs_exports: Optional[Sequence['outputs.FileSystemNfsExports']] = None,
read_only: Optional[bool] = None,
user_and_group_quotas: Optional[Sequence['outputs.FileSystemUserAndGroupQuotas']] = None):
if copy_tags_to_snapshots is not None:
pulumi.set(__self__, "copy_tags_to_snapshots", copy_tags_to_snapshots)
if data_compression_type is not None:
pulumi.set(__self__, "data_compression_type", data_compression_type)
if nfs_exports is not None:
pulumi.set(__self__, "nfs_exports", nfs_exports)
if read_only is not None:
pulumi.set(__self__, "read_only", read_only)
if user_and_group_quotas is not None:
pulumi.set(__self__, "user_and_group_quotas", user_and_group_quotas)
@property
@pulumi.getter(name="copyTagsToSnapshots")
def copy_tags_to_snapshots(self) -> Optional[bool]:
return pulumi.get(self, "copy_tags_to_snapshots")
@property
@pulumi.getter(name="dataCompressionType")
def data_compression_type(self) -> Optional[str]:
return pulumi.get(self, "data_compression_type")
@property
@pulumi.getter(name="nfsExports")
def nfs_exports(self) -> Optional[Sequence['outputs.FileSystemNfsExports']]:
return pulumi.get(self, "nfs_exports")
@property
@pulumi.getter(name="readOnly")
def read_only(self) -> Optional[bool]:
return pulumi.get(self, "read_only")
@property
@pulumi.getter(name="userAndGroupQuotas")
def user_and_group_quotas(self) -> Optional[Sequence['outputs.FileSystemUserAndGroupQuotas']]:
return pulumi.get(self, "user_and_group_quotas")
@pulumi.output_type
class FileSystemSelfManagedActiveDirectoryConfiguration(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "dnsIps":
suggest = "dns_ips"
elif key == "domainName":
suggest = "domain_name"
elif key == "fileSystemAdministratorsGroup":
suggest = "file_system_administrators_group"
elif key == "organizationalUnitDistinguishedName":
suggest = "organizational_unit_distinguished_name"
elif key == "userName":
suggest = "user_name"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in FileSystemSelfManagedActiveDirectoryConfiguration. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
FileSystemSelfManagedActiveDirectoryConfiguration.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
FileSystemSelfManagedActiveDirectoryConfiguration.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
dns_ips: Optional[Sequence[str]] = None,
domain_name: Optional[str] = None,
file_system_administrators_group: Optional[str] = None,
organizational_unit_distinguished_name: Optional[str] = None,
password: Optional[str] = None,
user_name: Optional[str] = None):
if dns_ips is not None:
pulumi.set(__self__, "dns_ips", dns_ips)
if domain_name is not None:
pulumi.set(__self__, "domain_name", domain_name)
if file_system_administrators_group is not None:
pulumi.set(__self__, "file_system_administrators_group", file_system_administrators_group)
if organizational_unit_distinguished_name is not None:
pulumi.set(__self__, "organizational_unit_distinguished_name", organizational_unit_distinguished_name)
if password is not None:
pulumi.set(__self__, "password", password)
if user_name is not None:
pulumi.set(__self__, "user_name", user_name)
@property
@pulumi.getter(name="dnsIps")
def dns_ips(self) -> Optional[Sequence[str]]:
return pulumi.get(self, "dns_ips")
@property
@pulumi.getter(name="domainName")
def domain_name(self) -> Optional[str]:
return pulumi.get(self, "domain_name")
@property
@pulumi.getter(name="fileSystemAdministratorsGroup")
def file_system_administrators_group(self) -> Optional[str]:
return pulumi.get(self, "file_system_administrators_group")
@property
@pulumi.getter(name="organizationalUnitDistinguishedName")
def organizational_unit_distinguished_name(self) -> Optional[str]:
return pulumi.get(self, "organizational_unit_distinguished_name")
@property
@pulumi.getter
def password(self) -> Optional[str]:
return pulumi.get(self, "password")
@property
@pulumi.getter(name="userName")
def user_name(self) -> Optional[str]:
return pulumi.get(self, "user_name")
@pulumi.output_type
class FileSystemTag(dict):
def __init__(__self__, *,
key: str,
value: str):
pulumi.set(__self__, "key", key)
pulumi.set(__self__, "value", value)
@property
@pulumi.getter
def key(self) -> str:
return pulumi.get(self, "key")
@property
@pulumi.getter
def value(self) -> str:
return pulumi.get(self, "value")
@pulumi.output_type
class FileSystemUserAndGroupQuotas(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "storageCapacityQuotaGiB":
suggest = "storage_capacity_quota_gi_b"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in FileSystemUserAndGroupQuotas. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
FileSystemUserAndGroupQuotas.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
FileSystemUserAndGroupQuotas.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
id: Optional[int] = None,
storage_capacity_quota_gi_b: Optional[int] = None,
type: Optional[str] = None):
if id is not None:
pulumi.set(__self__, "id", id)
if storage_capacity_quota_gi_b is not None:
pulumi.set(__self__, "storage_capacity_quota_gi_b", storage_capacity_quota_gi_b)
if type is not None:
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def id(self) -> Optional[int]:
return pulumi.get(self, "id")
@property
@pulumi.getter(name="storageCapacityQuotaGiB")
def storage_capacity_quota_gi_b(self) -> Optional[int]:
return pulumi.get(self, "storage_capacity_quota_gi_b")
@property
@pulumi.getter
def type(self) -> Optional[str]:
return pulumi.get(self, "type")
@pulumi.output_type
class FileSystemWindowsConfiguration(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "throughputCapacity":
suggest = "throughput_capacity"
elif key == "activeDirectoryId":
suggest = "active_directory_id"
elif key == "auditLogConfiguration":
suggest = "audit_log_configuration"
elif key == "automaticBackupRetentionDays":
suggest = "automatic_backup_retention_days"
elif key == "copyTagsToBackups":
suggest = "copy_tags_to_backups"
elif key == "dailyAutomaticBackupStartTime":
suggest = "daily_automatic_backup_start_time"
elif key == "deploymentType":
suggest = "deployment_type"
elif key == "preferredSubnetId":
suggest = "preferred_subnet_id"
elif key == "selfManagedActiveDirectoryConfiguration":
suggest = "self_managed_active_directory_configuration"
elif key == "weeklyMaintenanceStartTime":
suggest = "weekly_maintenance_start_time"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in FileSystemWindowsConfiguration. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
FileSystemWindowsConfiguration.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
FileSystemWindowsConfiguration.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
throughput_capacity: int,
active_directory_id: Optional[str] = None,
aliases: Optional[Sequence[str]] = None,
audit_log_configuration: Optional['outputs.FileSystemAuditLogConfiguration'] = None,
automatic_backup_retention_days: Optional[int] = None,
copy_tags_to_backups: Optional[bool] = None,
daily_automatic_backup_start_time: Optional[str] = None,
deployment_type: Optional[str] = None,
preferred_subnet_id: Optional[str] = None,
self_managed_active_directory_configuration: Optional['outputs.FileSystemSelfManagedActiveDirectoryConfiguration'] = None,
weekly_maintenance_start_time: Optional[str] = None):
pulumi.set(__self__, "throughput_capacity", throughput_capacity)
if active_directory_id is not None:
pulumi.set(__self__, "active_directory_id", active_directory_id)
if aliases is not None:
pulumi.set(__self__, "aliases", aliases)
if audit_log_configuration is not None:
pulumi.set(__self__, "audit_log_configuration", audit_log_configuration)
if automatic_backup_retention_days is not None:
pulumi.set(__self__, "automatic_backup_retention_days", automatic_backup_retention_days)
if copy_tags_to_backups is not None:
pulumi.set(__self__, "copy_tags_to_backups", copy_tags_to_backups)
if daily_automatic_backup_start_time is not None:
pulumi.set(__self__, "daily_automatic_backup_start_time", daily_automatic_backup_start_time)
if deployment_type is not None:
pulumi.set(__self__, "deployment_type", deployment_type)
if preferred_subnet_id is not None:
pulumi.set(__self__, "preferred_subnet_id", preferred_subnet_id)
if self_managed_active_directory_configuration is not None:
pulumi.set(__self__, "self_managed_active_directory_configuration", self_managed_active_directory_configuration)
if weekly_maintenance_start_time is not None:
pulumi.set(__self__, "weekly_maintenance_start_time", weekly_maintenance_start_time)
@property
@pulumi.getter(name="throughputCapacity")
def throughput_capacity(self) -> int:
return pulumi.get(self, "throughput_capacity")
@property
@pulumi.getter(name="activeDirectoryId")
def active_directory_id(self) -> Optional[str]:
return pulumi.get(self, "active_directory_id")
@property
@pulumi.getter
def aliases(self) -> Optional[Sequence[str]]:
return pulumi.get(self, "aliases")
@property
@pulumi.getter(name="auditLogConfiguration")
def audit_log_configuration(self) -> Optional['outputs.FileSystemAuditLogConfiguration']:
return pulumi.get(self, "audit_log_configuration")
@property
@pulumi.getter(name="automaticBackupRetentionDays")
def automatic_backup_retention_days(self) -> Optional[int]:
return pulumi.get(self, "automatic_backup_retention_days")
@property
@pulumi.getter(name="copyTagsToBackups")
def copy_tags_to_backups(self) -> Optional[bool]:
return pulumi.get(self, "copy_tags_to_backups")
@property
@pulumi.getter(name="dailyAutomaticBackupStartTime")
def daily_automatic_backup_start_time(self) -> Optional[str]:
return pulumi.get(self, "daily_automatic_backup_start_time")
@property
@pulumi.getter(name="deploymentType")
def deployment_type(self) -> Optional[str]:
return pulumi.get(self, "deployment_type")
@property
@pulumi.getter(name="preferredSubnetId")
def preferred_subnet_id(self) -> Optional[str]:
return pulumi.get(self, "preferred_subnet_id")
@property
@pulumi.getter(name="selfManagedActiveDirectoryConfiguration")
def self_managed_active_directory_configuration(self) -> Optional['outputs.FileSystemSelfManagedActiveDirectoryConfiguration']:
return pulumi.get(self, "self_managed_active_directory_configuration")
@property
@pulumi.getter(name="weeklyMaintenanceStartTime")
def weekly_maintenance_start_time(self) -> Optional[str]:
return pulumi.get(self, "weekly_maintenance_start_time")
| 41.461988 | 169 | 0.679492 |
8710e341bf8332c2dcb5221163d7e3005aaa71a2 | 617 | py | Python | mysite/myapp/migrations/0004_auto_20201008_1703.py | Jvong-max/instagramclone | d0658c02ba57918f832ab0040f8cb62f6485667b | [
"MIT"
] | null | null | null | mysite/myapp/migrations/0004_auto_20201008_1703.py | Jvong-max/instagramclone | d0658c02ba57918f832ab0040f8cb62f6485667b | [
"MIT"
] | 5 | 2021-06-04T23:53:10.000Z | 2022-01-13T03:23:31.000Z | mysite/myapp/migrations/0004_auto_20201008_1703.py | Jvong-max/instagramclone | d0658c02ba57918f832ab0040f8cb62f6485667b | [
"MIT"
] | 2 | 2020-09-14T06:37:18.000Z | 2021-10-04T20:53:19.000Z | # Generated by Django 3.1.2 on 2020-10-08 17:03
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('myapp', '0003_auto_20201006_1713'),
]
operations = [
migrations.AddField(
model_name='suggestionmodel',
name='image',
field=models.ImageField(max_length=144, null=True, upload_to='uploads/%Y/%m/%d/'),
),
migrations.AddField(
model_name='suggestionmodel',
name='image_description',
field=models.CharField(max_length=240, null=True),
),
]
| 25.708333 | 94 | 0.598055 |
6df8072614dba19010eaa8cda957d27f2755e281 | 24,234 | py | Python | pypeit/specobjs.py | seib2/PypeIt | 18ce33aa8aa12b8ee51303ad87a723ec81e0e6f8 | [
"BSD-3-Clause"
] | 1 | 2018-12-18T21:56:23.000Z | 2018-12-18T21:56:23.000Z | pypeit/specobjs.py | feigewang/PypeIt | 4c68b38cb907345a480d7afee58200a05ecd4556 | [
"BSD-3-Clause"
] | null | null | null | pypeit/specobjs.py | feigewang/PypeIt | 4c68b38cb907345a480d7afee58200a05ecd4556 | [
"BSD-3-Clause"
] | null | null | null | """ Module for the SpecObjs and SpecObj classes
"""
import copy
import re
import numpy as np
from scipy import interpolate
from astropy import units
from astropy.table import Table
from astropy.units import Quantity
from astropy.utils import isiterable
from linetools.spectra import xspectrum1d
from pypeit import msgs
from pypeit.core import parse
naming_model = {}
for key in ['SPAT', 'SLIT', 'DET', 'SCI','OBJ', 'ORDER']:
naming_model[key.lower()] = key
class SpecObj(object):
"""Class to handle object spectra from a single exposure
One generates one of these Objects for each spectrum in the exposure. They are instantiated by the object
finding routine, and then all spectral extraction information for the object are assigned as attributes
Args:
shape (tuple): nspec, nspat
dimensions of the spectral image that the object is identified on
slit_spat_pos (tuple): tuple of floats (spat_left,spat_right)
The spatial pixel location of the left and right slit trace arrays evaluated at slit_spec_pos (see below). These
will be in the range (0,nspat)
slit_spec_pos (float):
The midpoint of the slit location in the spectral direction. This will typically be nspec/2, but must be in the
range (0,nspec)
det (int): Detector number
config (str, optional): Instrument configuration
objtype (str, optional)
Type of object ('unknown', 'standard', 'science')
slitid (int, optional):
Identifier for the slit (max=9999)
Attributes:
slitcen (float): Center of slit in fraction of total (trimmed) detector size at ypos
objid (int): Identifier for the object (max=999)
flex_shift (float): Flexure correction in pixels
Extraction dict's
'WAVE' : wave_opt # Optimally extracted wavelengths
'COUNTS' : flux_opt # Optimally extracted flux
'COUNTS_IVAR' : mivar_opt # Inverse variance of optimally extracted flux using modelivar image
'COUNTS_NIVAR' : nivar_opt # Optimally extracted noise variance (sky + read noise) only
'MASK' : mask_opt # Mask for optimally extracted flux
'COUNTS_SKY' : sky_opt # Optimally extracted sky
'COUNTS_RN' : rn_opt # Square root of optimally extracted read noise squared
'FRAC_USE' : frac_use # Fraction of pixels in the object profile subimage used for this extraction
'CHI2' : chi2 # Reduced chi2 of the model fit for this spectral pixel
"""
# Attributes
def __init__(self, shape, slit_spat_pos, slit_spec_pos, det=1, setup=None, idx=None,
slitid=999, orderindx=999, objtype='unknown', pypeline='unknown', spat_pixpos=None, config=None):
#Assign from init parameters
self.shape = shape
self.slit_spat_pos = slit_spat_pos
self.slit_spec_pos = slit_spec_pos
self.setup = setup
self.slitid = slitid
self.det = det
self.objtype = objtype
self.config = config
self.pypeline = pypeline
# ToDo add all attributes here and to the documentaiton
# Object finding attributes
self.sign = 1.0
self.objid = 999
self.spat_fracpos = None
self.smash_peakflux = None
self.fwhm = None
self.trace_spat = None
self.spat_pixpos = spat_pixpos # Position on the image in pixels at the midpoint of the slit in spectral direction
self.maskwidth = None
self.min_spat = None
self.max_spat = None
self.prof_nsigma = None
self.fwhmfit = None
self.smash_nsig = None
# Wavelength items
self.flex_shift = 0.
# Some things for echelle functionality
self.ech_order = 0 # Needs a default value
self.ech_orderindx = orderindx
self.ech_objid = 999
self.ech_snr = None
self.ech_fracpos = None
self.ech_frac_was_fit = None
self.ech_usepca = False
# Attributes for HAND apertures, which are object added to the extraction by hand
self.hand_extract_spec = None
self.hand_extract_spat = None
self.hand_extract_det = None
self.hand_extract_fwhm = None
self.hand_extract_flag = False
# Dictionaries holding boxcar and optimal extraction parameters
self.boxcar = {} # Boxcar extraction 'wave', 'counts', 'var', 'sky', 'mask', 'flam', 'flam_var'
self.optimal = {} # Optimal extraction 'wave', 'counts', 'var', 'sky', 'mask', 'flam', 'flam_var'
# Generate IDs
#self.slitid = int(np.round(self.slitcen*1e4))
#self.objid = int(np.round(xobj*1e3))
# Set index
if idx is None:
self.set_idx()
else:
self.idx = idx
#
@staticmethod
def sobjs_key():
"""
This function returns the dictionary that defines the mapping between specobjs attributes and the fits header
cards
Returns:
dict:
"""
sobjs_key_dict = dict(det='DET',
objid='OBJID',
slitid='SLITID',
ech_objid='ECHOBJID',
ech_orderindx='ECHOINDX',
ech_order='ECHORDER',
pypeline='PYPELINE')
return sobjs_key_dict
def set_idx(self):
"""
Generate a unique index for this spectrum
Sets self.idx internally
Returns:
str: :attr:`self.idx`
"""
# Detector string
sdet = parse.get_dnum(self.det, prefix=False)
if 'Echelle' in self.pypeline:
# ObjID
self.idx = naming_model['obj']
if self.ech_objid is None:
self.idx += '----'
else:
self.idx += '{:04d}'.format(self.ech_objid)
self.idx += '-'+naming_model['order']
# Order
if self.ech_orderindx is None:
self.idx += '----'
else:
self.idx += '{:04d}'.format(self.ech_order)
else:
# Spat
self.idx = naming_model['spat']
if self.spat_pixpos is None:
self.idx += '----'
else:
self.idx += '{:04d}'.format(int(np.rint(self.spat_pixpos)))
# Slit
self.idx += '-'+naming_model['slit']
if self.slitid is None:
self.idx += '----'
else:
self.idx += '{:04d}'.format(self.slitid)
self.idx += '-{:s}{:s}'.format(naming_model['det'], sdet)
# Return
return self.idx
def check_trace(self, trace, toler=1.):
"""
Check that the input trace matches the defined specobjexp
Args:
trace (ndarray): Trace of the object
toler (float): Tolerance for matching, in pixels
Returns:
bool: True = match within tolerance
"""
# Trace
yidx = int(np.round(self.ypos*trace.size))
obj_trc = trace[yidx]
# self
nslit = self.shape[1]*(self.xslit[1]-self.xslit[0])
xobj_pix = self.shape[1]*self.xslit[0] + nslit*self.xobj
# Check
if np.abs(obj_trc-xobj_pix) < toler:
return True
else:
return False
def copy(self):
"""
Generate a copy of this object
Returns:
SpecObj
"""
sobj_copy = SpecObj(self.shape, self.slit_spat_pos, self.slit_spec_pos) # Instantiate
# sobj_copy.__dict__ = self.__dict__.copy() # Copy over all attributes
# sobj_copy.boxcar = self.boxcar.copy() # Copy boxcar and optimal dicts
# sobj_copy.optimal = self.optimal.copy()
sobj_copy.__dict__ = copy.deepcopy(self.__dict__)
sobj_copy.boxcar = copy.deepcopy(self.boxcar) # Copy boxcar and optimal dicts
sobj_copy.optimal = copy.deepcopy(self.optimal)
# These attributes are numpy arrays that don't seem to copy from the lines above??
return sobj_copy
def flexure_interp(self, sky_wave, fdict):
"""
Apply interpolation with the flexure dict
Args:
sky_wave (np.ndarray): Wavelengths of the extracted sky
fdict (dict): Holds the various flexure items
Returns:
np.ndarray: New sky spectrum (mainly for QA)
"""
# Simple interpolation to apply
npix = len(sky_wave)
x = np.linspace(0., 1., npix)
# Apply
for attr in ['boxcar', 'optimal']:
if not hasattr(self, attr):
continue
if 'WAVE' in getattr(self, attr).keys():
msgs.info("Applying flexure correction to {0:s} extraction for object:".format(attr) +
msgs.newline() + "{0:s}".format(str(self)))
f = interpolate.interp1d(x, sky_wave, bounds_error=False, fill_value="extrapolate")
getattr(self, attr)['WAVE'] = f(x + fdict['shift'] / (npix - 1)) * units.AA
# Shift sky spec too
cut_sky = fdict['sky_spec']
x = np.linspace(0., 1., cut_sky.npix)
f = interpolate.interp1d(x, cut_sky.wavelength.value, bounds_error=False, fill_value="extrapolate")
twave = f(x + fdict['shift'] / (cut_sky.npix - 1)) * units.AA
new_sky = xspectrum1d.XSpectrum1D.from_tuple((twave, cut_sky.flux))
# Save
self.flex_shift = fdict['shift']
# Return
return new_sky
def to_xspec1d(self, extraction='optimal'):
"""
Convert the SpecObj to an XSpectrum1D object
Args:
extraction (str): Extraction method to convert
Returns:
linetools.spectra.xspectrum1d.XSpectrum1D: Spectrum object
"""
extract = getattr(self, extraction)
if len(extract) == 0:
msgs.warn("This object has not been extracted with extract={}".format(extraction))
if 'FLAM' in extract:
flux = extract['FLAM']
sig = extract['FLAM_SIG']
else:
flux = extract['COUNTS']
sig = np.zeros_like(flux)
gdc = extract['COUNTS_IVAR'] > 0.
sig[gdc] = 1./np.sqrt(extract['COUNTS_IVAR'][gdc])
# Create
xspec = xspectrum1d.XSpectrum1D.from_tuple((extract['WAVE'], flux, sig))
return xspec
def show(self, extraction='optimal'):
"""
Show the spectrum by converting it to a XSpectrum1D object
Args:
extraction (str): Extraction option 'optimal' or 'boxcar'
Returns:
"""
extract = getattr(self, extraction)
# Generate an XSpec
xspec = self.to_xspec1d(extraction=extraction)
if xspec is None:
return
xspec.plot(xspec=True)
def __getitem__(self, key):
# Access the DB groups
return getattr(self, key)
def __repr__(self):
# Create a single summary table for one object, so that the representation is always the same
sobjs = SpecObjs(specobjs=[self])
return sobjs.summary.__repr__()
class SpecObjs(object):
    """
    Object to hold a set of SpecObj objects

    Args:
        specobjs (ndarray or list, optional): One or more SpecObj objects

    Internals:
        summary (astropy.table.Table): one row per SpecObj, one column per
            attribute of the first SpecObj (rebuilt by build_summary)

    __getitem__ is overloaded to allow one to pull an attribute or a
        portion of the SpecObjs list
        Args:
            item (str or int (or slice)
        Returns:
            item (object, SpecObj or SpecObjs): Depends on input item..
    __setitem__ is over-loaded using our custom set() method
        Args:
            name (str): Item to set
            value (anything) : Value of the item
        Returns:
    __getattr__ is overloaded to generate an array of attribute 'k' from the specobjs
        First attempts to grab data from the Summary table, then the list
    """
    def __init__(self, specobjs=None):
        # Always store an ndarray internally; a list input is converted.
        if specobjs is None:
            self.specobjs = np.array([])
        else:
            if isinstance(specobjs, (list, np.ndarray)):
                specobjs = np.array(specobjs)
            self.specobjs = specobjs
        # Internal summary Table
        self.build_summary()
    @property
    def nobj(self):
        """
        Return the number of SpecObj objects

        Returns:
            int
        """
        return self.specobjs.size
    def get_std(self):
        """
        Return the standard star from this Specobjs. For MultiSlit this
        will be a single specobj in SpecObjs container, for Echelle it
        will be the standard for all the orders.

        The standard is chosen as the object with the highest median S/N.

        Args:
        Returns:
            SpecObj or SpecObjs
        """
        # Is this MultiSlit or Echelle
        pypeline = (self.pypeline)[0]
        if 'MultiSlit' in pypeline:
            # NOTE(review): nspec is assigned but never used below
            nspec = self[0].optimal['COUNTS'].size
            SNR = np.zeros(self.nobj)
            # Have to do a loop to extract the counts for all objects
            for iobj in range(self.nobj):
                SNR[iobj] = np.median(self[iobj].optimal['COUNTS']*np.sqrt(self[iobj].optimal['COUNTS_IVAR']))
            istd = SNR.argmax()
            return SpecObjs(specobjs=[self[istd]])
        elif 'Echelle' in pypeline:
            uni_objid = np.unique(self.ech_objid)
            uni_order = np.unique(self.ech_orderindx)
            nobj = len(uni_objid)
            norders = len(uni_order)
            # Per-(order, object) median S/N; combined in quadrature over orders
            SNR = np.zeros((norders, nobj))
            for iobj in range(nobj):
                for iord in range(norders):
                    ind = (self.ech_objid == uni_objid[iobj]) & (self.ech_orderindx == uni_order[iord])
                    spec = self[ind]
                    SNR[iord, iobj] = np.median(spec[0].optimal['COUNTS']*np.sqrt(spec[0].optimal['COUNTS_IVAR']))
            SNR_all = np.sqrt(np.sum(SNR**2,axis=0))
            objid_std = uni_objid[SNR_all.argmax()]
            indx = self.ech_objid == objid_std
            return SpecObjs(specobjs=self[indx])
        else:
            # NOTE(review): presumably msgs.error() terminates; otherwise this
            # branch implicitly returns None -- confirm msgs.error semantics.
            msgs.error('Unknown pypeline')
    def append_neg(self, sobjs_neg):
        """
        Append negative objects and change the sign of their objids for IR reductions

        Args:
            sobjs_neg (SpecObjs):
        """
        # Assign the sign and the objids
        for spec in sobjs_neg:
            spec.sign = -1.0
            # objid / ech_objid may still be None for fresh objects; the
            # TypeError from negating None is deliberately ignored.
            try:
                spec.objid = -spec.objid
            except TypeError:
                pass
            try:
                spec.ech_objid = -spec.ech_objid
            except TypeError:
                pass
        self.add_sobj(sobjs_neg)
        # Sort objects according to their spatial location. Necessary for the extraction to properly work
        if self.nobj > 0:
            spat_pixpos = self.spat_pixpos
            self.specobjs = self.specobjs[spat_pixpos.argsort()]
    def purge_neg(self):
        """
        Purge negative objects from specobjs for IR reductions
        """
        # Objects flagged negative carry a negative objid (set in append_neg)
        if self.nobj > 0:
            index = (self.objid < 0) | (self.ech_objid < 0)
            self.remove_sobj(index)
    def add_sobj(self, sobj):
        """
        Add one or more SpecObj

        The summary table is rebuilt

        Args:
            sobj (SpecObj or list or ndarray): On or more SpecObj objects

        Returns:
        """
        if isinstance(sobj, SpecObj):
            self.specobjs = np.append(self.specobjs, [sobj])
        elif isinstance(sobj, (np.ndarray,list)):
            self.specobjs = np.append(self.specobjs, sobj)
        elif isinstance(sobj, SpecObjs):
            # NOTE(review): relies on np.append iterating the SpecObjs
            # container via its __len__/__getitem__ -- confirm for the
            # installed numpy version.
            self.specobjs = np.append(self.specobjs, sobj)
        # Rebuild summary table
        self.build_summary()
    def build_summary(self):
        """
        Build the internal Summary Table

        Returns:
            Builds self.summary Table internally
        """
        # Dummy?
        if len(self.specobjs) == 0:
            self.summary = Table()
            return
        # The attributes of the FIRST SpecObj define the table columns;
        # every object is assumed to expose the same attribute set.
        atts = self.specobjs[0].__dict__.keys()
        uber_dict = {}
        for key in atts:
            uber_dict[key] = []
            for sobj in self.specobjs:
                uber_dict[key] += [getattr(sobj, key)]
        # Build it
        self.summary = Table(uber_dict)
    def remove_sobj(self, index):
        """
        Remove an object

        Args:
            index: int, array of int, or boolean mask selecting the
                object(s) to drop

        Returns:
        """
        # Keep everything except the selected entries
        msk = np.ones(self.specobjs.size, dtype=bool)
        msk[index] = False
        # Do it
        self.specobjs = self.specobjs[msk]
        # Update
        self.build_summary()
    def copy(self):
        """
        Generate a copy of self

        Returns:
            SpecObjs
        """
        sobj_copy = SpecObjs()
        # Deep-copy each member so the two containers share no state
        for sobj in self.specobjs:
            sobj_copy.add_sobj(sobj.copy())
        sobj_copy.build_summary()
        return sobj_copy
    def set_idx(self):
        """
        Set the idx in all the SpecObj

        Update the summary Table

        Returns:
        """
        for sobj in self.specobjs:
            sobj.set_idx()
        self.build_summary()
    def __getitem__(self, item):
        # str -> attribute array; int -> single SpecObj; slice/mask -> SpecObjs
        if isinstance(item, str):
            return self.__getattr__(item)
        elif isinstance(item, (int, np.integer)):
            return self.specobjs[item]  # TODO Is this using pointers or creating new data????
        elif (isinstance(item, slice) or  # Stolen from astropy.table
              isinstance(item, np.ndarray) or
              isinstance(item, list) or
              isinstance(item, tuple) and all(isinstance(x, np.ndarray) for x in item)):
            # here for the many ways to give a slice; a tuple of ndarray
            # is produced by np.where, as in t[np.where(t['a'] > 2)]
            # For all, a new table is constructed with slice of all columns
            return SpecObjs(specobjs=self.specobjs[item])
    # TODO this code fails for assignments of this nature sobjs[:].attribute = np.array(5)
    def __setitem__(self, name, value):
        self.set(slice(0,self.nobj), name, value)
    def set(self, islice, attr, value):
        """
        Set the attribute for a slice of the specobjs

        Args:
            islice (int, ndarray of bool, slice): Indicates SpecObj to affect
            attr (str):
            value (anything) : Value of the item

        Returns:
        """
        sub_sobjs = self.specobjs[islice]
        if isiterable(value):
            # If the value count matches the selection, pair them up
            # one-to-one; otherwise fall through to scalar assignment.
            if sub_sobjs.size == len(value):  # Assume you want each paired up
                for kk,sobj in enumerate(sub_sobjs):
                    setattr(sobj, attr, value[kk])
                    return
        # Assuming scalar assignment
        if isinstance(sub_sobjs, SpecObj):
            setattr(sub_sobjs, attr, value)
        else:
            for sobj in sub_sobjs:
                setattr(sobj, attr, value)
        return
    def __getattr__(self, k):
        # Overloaded: first look in the summary table, then the objects
        self.build_summary()
        # Special case(s)
        if k in self.summary.keys():  # _data
            lst = self.summary[k]
        else:
            lst = None
        # specobjs last!
        if lst is None:
            if len(self.specobjs) == 0:
                raise ValueError("Attribute not available!")
            try:
                lst = [getattr(specobj, k) for specobj in self.specobjs]
            except ValueError:
                # NOTE(review): getattr raises AttributeError (not ValueError)
                # for a missing attribute, so this handler likely never fires,
                # and raising ValueError from __getattr__ breaks hasattr() --
                # confirm intended behavior.
                raise ValueError("Attribute does not exist")
        # Recast as an array
        return lst_to_array(lst)
    # Printing
    def __repr__(self):
        return self.summary.__repr__()
    def __len__(self):
        return len(self.specobjs)
    def keys(self):
        self.build_summary()
        return self.summary.keys()
def objnm_to_dict(objnm):
    """ Convert an object name or list of them into a dict

    An identifier like 'O123-S4567-D01' becomes
    {'O': 123, 'S': 4567, 'D': 1}.  For a list of identifiers, each value
    becomes a list instead (keys are taken from the first entry).

    Parameters
    ----------
    objnm : str or list of str

    Returns
    -------
    odict : dict
        Object value or list of object values

    Raises
    ------
    ValueError
        If a component of the identifier contains no digit (previously this
        crashed with AttributeError on re.search(...) returning None).
    """
    if isinstance(objnm, list):
        tdict = {}
        for kk, iobj in enumerate(objnm):
            idict = objnm_to_dict(iobj)
            # The first entry defines the keys; subsequent entries append
            if kk == 0:
                for key in idict.keys():
                    tdict[key] = []
            # Fill
            for key in idict.keys():
                tdict[key].append(idict[key])
        # Generate the Table
        return tdict
    # Generate the dict
    prs = objnm.split('-')
    odict = {}
    for iprs in prs:
        # Find first character that is an integer; the prefix is the key and
        # the remainder is the integer value.  Raw string fixes the invalid
        # escape sequence "\d" in the original.
        match = re.search(r"\d", iprs)
        if match is None:
            raise ValueError("Bad object identifier component: {}".format(iprs))
        idig = match.start()
        odict[iprs[:idig]] = int(iprs[idig:])
    # Return
    return odict
def mtch_obj_to_objects(iobj, objects, stol=50, otol=10, **kwargs):
    """
    Match one object identifier against a list of identifiers.

    Parameters
    ----------
    iobj : str
        Object identifier in format O###-S####-D##
    objects : list
        List of object identifiers
    stol : int
        Tolerance in slit matching
    otol : int
        Tolerance in object matching

    Returns
    -------
    matches : list or None
        Matching entries of `objects`; None when nothing matches
    indices : list
        Positions of the matches within `objects`
    """
    # Parse the input object and build a Table of the candidates
    target = objnm_to_dict(iobj)
    cand_tbl = Table(objnm_to_dict(objects))
    # Require spatial and slit positions within tolerance, on the same
    # detector [sciidx is ignored for now]
    spat_ok = np.abs(cand_tbl[naming_model['spat']] - target[naming_model['spat']]) < otol
    slit_ok = np.abs(cand_tbl[naming_model['slit']] - target[naming_model['slit']]) < stol
    det_ok = cand_tbl[naming_model['det']] == target[naming_model['det']]
    gdrow = spat_ok & slit_ok & det_ok
    if np.sum(gdrow) == 0:
        return None
    return np.array(objects)[gdrow].tolist(), np.where(gdrow)[0].tolist()
def dummy_specobj(shape, det=1, extraction=True):
    """ Generate dummy specobj classes for testing.

    Parameters
    ----------
    shape : tuple
        naxis1, naxis0
    det : int, optional
        Detector number
    extraction : bool, optional
        If True, fill each object's boxcar dict with a fake spectrum

    Returns
    -------
    sobj_list : list
        Pair of SpecObj objects
    """
    # Fix: removed unused locals `scidx` and `ypos` (only referenced by
    # previously commented-out code) and repaired the docstring layout.
    config = 'AA'
    xslit = (0.3, 0.7)  # Center of the detector
    xobjs = [0.4, 0.6]
    sobj_list = []
    # xobj itself is currently unused; the list length sets how many dummy
    # objects are generated.
    for jj, xobj in enumerate(xobjs):
        specobj = SpecObj(shape, 1240, xslit, spat_pixpos=900, det=det, config=config)
        specobj.slitid = jj+1
        # Dummy extraction?
        if extraction:
            npix = 2001
            specobj.boxcar['WAVE'] = np.linspace(4000., 6000., npix)*units.AA
            specobj.boxcar['COUNTS'] = 50.*(specobj.boxcar['WAVE'].value/5000.)**-1.
            specobj.boxcar['COUNTS_IVAR'] = 1./specobj.boxcar['COUNTS'].copy()
        # Append
        sobj_list.append(specobj)
    # Return
    return sobj_list
def lst_to_array(lst, mask=None):
    """
    Simple method to convert a list to an array

    Allows for a list of Quantity objects

    Args:
        lst : list
            Should be number or Quantities
        mask (ndarray of bool, optional): Limit to a subset of the list. True=good

    Returns:
        ndarray or Quantity array: Converted list
    """
    # Fix: an empty input previously crashed with IndexError on lst[0]
    if len(lst) == 0:
        return np.array([])
    if mask is None:
        # Keep every element; np.ones is the idiomatic form of the previous
        # np.array([True]*len(lst))
        mask = np.ones(len(lst), dtype=bool)
    if isinstance(lst[0], Quantity):
        return Quantity(lst)[mask]
    else:
        return np.array(lst)[mask]
def unravel_specobjs(specobjs):
    """
    Likely to be Deprecated

    Flatten a nested det/slit/object structure into a single flat list.
    If the input is not nested (i.e. contains anything other than lists
    and None), it is returned unchanged.

    Args:
        specobjs (list of lists or list of SpecObj):

    Returns:
        list: list of SpecObj
    """
    # A still-wrapped input consists solely of None entries and lists
    wrapped = all(isinstance(item, (list, type(None))) for item in specobjs)
    if not wrapped:
        return specobjs
    flat = []
    for per_det in specobjs:        # detector loop
        if per_det is None:
            continue
        for per_slit in per_det:    # slit loop
            flat.extend(per_slit)   # object loop
    return flat
| 31.069231 | 124 | 0.571594 |
e3a676d073363aa3a7dd346f56038ac2059bcc2f | 6,843 | py | Python | src/services/application_service.py | ismetacar/ertis-auth | 64727cc8201d5fcc955485e94262500d63ff4b17 | [
"MIT"
] | 17 | 2020-06-17T15:28:59.000Z | 2021-09-21T19:18:14.000Z | src/services/application_service.py | ismetacar/Ertis-Auth | 5521eb8a0b11fca7c5ff2a4ecc6cc0b9af59aa8f | [
"MIT"
] | 5 | 2020-06-17T21:22:56.000Z | 2021-05-02T19:10:05.000Z | src/services/application_service.py | ismetacar/Ertis-Auth | 5521eb8a0b11fca7c5ff2a4ecc6cc0b9af59aa8f | [
"MIT"
] | 2 | 2021-03-02T17:08:07.000Z | 2021-04-07T18:11:59.000Z | import copy
import datetime
from bson import ObjectId
from src.resources.applications.applications import (
generate_app_secrets,
pop_non_updatable_fields,
)
from src.resources.generic import query
from src.utils.errors import ErtisError
from src.utils.events import Event
from src.utils.json_helpers import maybe_object_id
class ApplicationService(object):
    """Service layer for CRUD operations on applications.

    All mutating operations are scoped to the utilizer's membership and emit
    an audit event through the provided event service.

    Fix: the error messages in ``_remove_application`` and
    ``_update_application_with_body`` said "user" (copy-paste from the user
    service); they now say "application".  The machine-readable ``err_code``
    values and context keys are left unchanged for API compatibility.
    """
    def __init__(self, db, role_service):
        # db: async MongoDB handle exposing the `applications` collection
        # role_service: used to validate that a referenced role exists
        self.db = db
        self.role_service = role_service

    async def create_application(self, resource, utilizer, event_service):
        """Create an application and emit an ApplicationCreatedEvent.

        The referenced role must exist and the name must be unique within
        the utilizer's membership; app secrets are generated before insert.
        """
        resource['membership_id'] = utilizer['membership_id']
        resource['_id'] = ObjectId()
        resource['sys'] = {
            'created_at': datetime.datetime.utcnow(),
            'created_by': utilizer.get('username', utilizer.get('name'))
        }
        # Raises if the role does not exist in this membership
        await self.role_service.get_role_by_slug(resource['role'], utilizer['membership_id'])
        await self._ensure_name_is_unique_in_membership(resource)
        resource = generate_app_secrets(resource)
        await self.db.applications.insert_one(resource)
        await event_service.on_event((Event(**{
            'document': resource,
            'prior': {},
            'utilizer': utilizer,
            'type': 'ApplicationCreatedEvent',
            'membership_id': utilizer['membership_id'],
            'sys': {
                'created_at': datetime.datetime.utcnow(),
                'created_by': utilizer.get('username', utilizer.get('name'))
            }
        })))
        return resource

    async def get_application(self, resource_id, user):
        """Fetch a single application within the user's membership (404 if absent)."""
        return await self._find_application(resource_id, user['membership_id'])

    async def update_application(self, application_id, data, utilizer, event_service):
        """Update an application and emit an ApplicationUpdatedEvent.

        Raises a 409 if the provided body would not change the document.
        """
        resource = await self._find_application(application_id, utilizer['membership_id'])
        provided_body = pop_non_updatable_fields(data)
        # NOTE: _check_identicality merges provided_body into `resource` in
        # place and returns the pre-merge copy (used as the event's `prior`).
        _resource = self._check_identicality(resource, provided_body)
        resource['sys'].update({
            'modified_at': datetime.datetime.utcnow(),
            'modified_by': utilizer.get('username', utilizer.get('name'))
        })
        provided_body['sys'] = resource['sys']
        updated_application = await self._update_application_with_body(
            application_id, utilizer['membership_id'], provided_body
        )
        # Stringify ObjectIds for event serialization
        _resource['_id'] = str(_resource['_id'])
        updated_application['_id'] = str(updated_application['_id'])
        await event_service.on_event((Event(**{
            'document': updated_application,
            'prior': _resource,
            'utilizer': utilizer,
            'type': 'ApplicationUpdatedEvent',
            'membership_id': utilizer['membership_id'],
            'sys': {
                'created_at': datetime.datetime.utcnow(),
                'created_by': utilizer.get('username', utilizer.get('name'))
            }
        })))
        return updated_application

    async def delete_application(self, application_id, utilizer, event_service):
        """Delete an application and emit an ApplicationDeletedEvent."""
        application = await self._find_application(application_id, utilizer['membership_id'])
        await self._remove_application(utilizer['membership_id'], application_id)
        application['_id'] = str(application['_id'])
        await event_service.on_event((Event(**{
            'document': {},
            'prior': application,
            'utilizer': utilizer,
            'type': 'ApplicationDeletedEvent',
            'membership_id': utilizer['membership_id'],
            'sys': {
                'created_at': datetime.datetime.utcnow(),
                'created_by': utilizer.get('username', utilizer.get('name'))
            }
        })))

    async def query_applications(self, membership_id, where, select, limit, skip, sort):
        """Run a generic query against this membership's applications."""
        return await query(self.db, membership_id, where, select, limit, skip, sort, 'applications')

    async def _ensure_name_is_unique_in_membership(self, application):
        """Raise a 400 ErtisError if the name is taken within the membership."""
        exists_application = await self.db.applications.find_one({
            'name': application['name'],
            'membership_id': application['membership_id']
        })
        if exists_application:
            raise ErtisError(
                err_msg="Application already exists in db with given name: <{}>".format(application['name']),
                err_code="errors.applicationNameAlreadyExists",
                status_code=400
            )

    async def _find_application(self, application_id, membership_id):
        """Return the application document or raise a 404 ErtisError."""
        application = await self.db.applications.find_one({
            '_id': maybe_object_id(application_id),
            'membership_id': membership_id
        })
        if not application:
            raise ErtisError(
                err_code="errors.applicationNotFound",
                err_msg="Application was not found by given id: <{}>".format(application_id),
                status_code=404
            )
        return application

    async def _remove_application(self, membership_id, application_id):
        """Delete the document, wrapping driver failures in a 500 ErtisError."""
        try:
            await self.db.applications.delete_one({
                '_id': maybe_object_id(application_id),
                'membership_id': membership_id
            })
        except Exception as e:
            raise ErtisError(
                # Fixed: message used to say "deleting user"
                err_msg="An error occurred while deleting application",
                err_code="errors.errorOccurredWhileDeletingUser",
                status_code=500,
                context={
                    # NOTE(review): 'platform_id' key kept as-is -- clients may
                    # rely on it; confirm before renaming to 'application_id'.
                    'platform_id': application_id
                },
                reason=str(e)
            )

    async def _update_application_with_body(self, application_id, membership_id, provided_body):
        """Apply $set with provided_body and return the refreshed document."""
        try:
            await self.db.applications.update_one(
                {
                    '_id': maybe_object_id(application_id),
                    'membership_id': membership_id
                },
                {
                    '$set': provided_body
                }
            )
        except Exception as e:
            raise ErtisError(
                err_code="errors.errorOccurredWhileUpdatingUser",
                # Fixed: message used to say "updating user"
                err_msg="An error occurred while updating application with provided body",
                status_code=500,
                context={
                    'provided_body': provided_body
                },
                reason=str(e)
            )
        application = await self._find_application(application_id, membership_id)
        return application

    @staticmethod
    def _check_identicality(resource, provided_body):
        """Merge provided_body into resource (in place); return the pre-merge copy.

        Raises a 409 ErtisError when the merge changes nothing.
        """
        _resource = copy.deepcopy(resource)
        resource.update(provided_body)
        if resource == _resource:
            raise ErtisError(
                err_msg="Identical document error",
                err_code="errors.identicalDocument",
                status_code=409
            )
        return _resource
| 37.393443 | 109 | 0.602221 |
37df59b75c168508d5a2df12266cf64483ab8f43 | 19,010 | py | Python | zulip/integrations/jabber/jabber_mirror_backend.py | iishiishii/python-zulip-api | 8500a3238739a080e1809e204c54685437631457 | [
"Apache-2.0"
] | null | null | null | zulip/integrations/jabber/jabber_mirror_backend.py | iishiishii/python-zulip-api | 8500a3238739a080e1809e204c54685437631457 | [
"Apache-2.0"
] | null | null | null | zulip/integrations/jabber/jabber_mirror_backend.py | iishiishii/python-zulip-api | 8500a3238739a080e1809e204c54685437631457 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
#
# Copyright (C) 2013 Permabit, Inc.
# Copyright (C) 2013--2014 Zulip, Inc.
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# The following is a table showing which kinds of messages are handled by the
# mirror in each mode:
#
# Message origin/type --> | Jabber | Zulip
# Mode/sender-, +-----+----+--------+----
# V | MUC | PM | stream | PM
# --------------+-------------+-----+----+--------+----
# | other sender| | x | |
# personal mode +-------------+-----+----+--------+----
# | self sender | | x | x | x
# ------------- +-------------+-----+----+--------+----
# | other sender| x | | |
# public mode +-------------+-----+----+--------+----
# | self sender | | | |
from typing import Dict, List, Set, Optional
import logging
import threading
import optparse
from sleekxmpp import ClientXMPP, InvalidJID, JID
from sleekxmpp.stanza import Message as JabberMessage
from sleekxmpp.exceptions import IqError, IqTimeout
from configparser import SafeConfigParser
import getpass
import os
import sys
import zulip
from zulip import Client
import re
from typing import Any, Callable
__version__ = "1.1"
def room_to_stream(room):
    # type: (str) -> str
    """Map a Jabber MUC room name to its mirrored Zulip stream name."""
    return "%s/xmpp" % (room,)
def stream_to_room(stream):
    # type: (str) -> str
    """Inverse of room_to_stream: lowercase and strip the trailing "/xmpp"."""
    lowered = stream.lower()
    return lowered.rpartition("/xmpp")[0]
def jid_to_zulip(jid):
    # type: (JID) -> str
    """Build the Zulip email for a Jabber JID, appending the configured
    suffix to non-bot usernames."""
    if jid.username.endswith("-bot"):
        suffix = ''
    else:
        suffix = options.zulip_email_suffix
    return "%s%s@%s" % (jid.username, suffix, options.zulip_domain)
def zulip_to_jid(email, jabber_domain):
    # type: (str, str) -> JID
    """Build the Jabber JID for a Zulip email, stripping the configured
    suffix from non-bot usernames."""
    jid = JID(email, domain=jabber_domain)
    suffix = options.zulip_email_suffix
    should_strip = (bool(suffix) and
                    suffix in jid.username and
                    not jid.username.endswith("-bot"))
    if should_strip:
        jid.username = jid.username.rpartition(suffix)[0]
    return jid
class JabberToZulipBot(ClientXMPP):
    # Jabber side of the mirror: joins MUC rooms and forwards Jabber traffic
    # into Zulip through the paired ZulipToJabberBot's API client.
    def __init__(self, jid, password, rooms):
        # type: (JID, str, List[str]) -> None
        # The JID's resource doubles as the MUC nickname; default to "zulip"
        if jid.resource:
            self.nick = jid.resource
        else:
            self.nick = jid.username
            jid.resource = "zulip"
        ClientXMPP.__init__(self, jid, password)
        self.rooms = set()  # type: Set[str]
        self.rooms_to_join = rooms
        self.add_event_handler("session_start", self.session_start)
        self.add_event_handler("message", self.message)
        self.zulip = None
        self.use_ipv6 = False
        self.register_plugin('xep_0045')  # Jabber chatrooms
        self.register_plugin('xep_0199')  # XMPP Ping
    def set_zulip_client(self, zulipToJabberClient):
        # type: (ZulipToJabberBot) -> None
        # Attach the Zulip side of the mirror (used to send messages)
        self.zulipToJabber = zulipToJabberClient
    def session_start(self, event):
        # type: (Dict[str, Any]) -> None
        # Standard XMPP session setup, then join all configured rooms
        self.get_roster()
        self.send_presence()
        for room in self.rooms_to_join:
            self.join_muc(room)
    def join_muc(self, room):
        # type: (str) -> None
        """Join (and, if possible, configure) the given MUC room; no-op if
        already joined."""
        if room in self.rooms:
            return
        logging.debug("Joining " + room)
        self.rooms.add(room)
        muc_jid = JID(local=room, domain=options.conference_domain)
        xep0045 = self.plugin['xep_0045']
        try:
            xep0045.joinMUC(muc_jid, self.nick, wait=True)
        except InvalidJID:
            logging.error("Could not join room: " + str(muc_jid))
            return
        # Configure the room. Really, we should only do this if the room is
        # newly created.
        form = None
        try:
            form = xep0045.getRoomConfig(muc_jid)
        except ValueError:
            pass
        if form:
            xep0045.configureRoom(muc_jid, form)
        else:
            logging.error("Could not configure room: " + str(muc_jid))
    def leave_muc(self, room):
        # type: (str) -> None
        """Leave the given MUC room; no-op if not currently joined."""
        if room not in self.rooms:
            return
        logging.debug("Leaving " + room)
        self.rooms.remove(room)
        muc_jid = JID(local=room, domain=options.conference_domain)
        self.plugin['xep_0045'].leaveMUC(muc_jid, self.nick)
    def message(self, msg):
        # type: (JabberMessage) -> Any
        # Dispatch incoming Jabber messages by type; never let an exception
        # escape into sleekxmpp's event loop.
        try:
            if msg["type"] == "groupchat":
                return self.group(msg)
            elif msg["type"] == "chat":
                return self.private(msg)
            else:
                logging.warning("Got unexpected message type")
                logging.warning(msg)
        except Exception:
            logging.exception("Error forwarding Jabber => Zulip")
    def private(self, msg):
        # type: (JabberMessage) -> None
        # Mirror a Jabber PM to Zulip (personal mode only). The \u1FFFE
        # thread marker tags messages the mirror itself sent, so they are
        # not mirrored back.
        if options.mode == 'public' or msg['thread'] == u'\u1FFFE':
            return
        sender = jid_to_zulip(msg["from"])
        recipient = jid_to_zulip(msg["to"])
        zulip_message = dict(
            sender = sender,
            type = "private",
            to = recipient,
            content = msg["body"],
        )
        ret = self.zulipToJabber.client.send_message(zulip_message)
        if ret.get("result") != "success":
            logging.error(str(ret))
    def group(self, msg):
        # type: (JabberMessage) -> None
        # Mirror a MUC message into the matching Zulip stream (public mode
        # only); skips the mirror's own tagged messages.
        if options.mode == 'personal' or msg["thread"] == u'\u1FFFE':
            return
        subject = msg["subject"]
        if len(subject) == 0:
            subject = "(no topic)"
        stream = room_to_stream(msg['from'].local)
        sender_nick = msg.get_mucnick()
        if not sender_nick:
            # Messages from the room itself have no nickname. We should not try
            # to mirror these
            return
        jid = self.nickname_to_jid(msg.get_mucroom(), sender_nick)
        sender = jid_to_zulip(jid)
        zulip_message = dict(
            forged = "yes",
            sender = sender,
            type = "stream",
            subject = subject,
            to = stream,
            content = msg["body"],
        )
        ret = self.zulipToJabber.client.send_message(zulip_message)
        if ret.get("result") != "success":
            logging.error(str(ret))
    def nickname_to_jid(self, room, nick):
        # type: (str, str) -> JID
        # Resolve a MUC nickname to a real JID; if the room does not expose
        # one, synthesize a JID from the nickname on our own domain.
        jid = self.plugin['xep_0045'].getJidProperty(room, nick, "jid")
        if (jid is None or jid == ''):
            return JID(local=nick.replace(' ', ''), domain=self.boundjid.domain)
        else:
            return jid
class ZulipToJabberBot(object):
    """Zulip side of the mirror: forwards this user's Zulip messages out to
    Jabber and keeps MUC membership in sync with stream events.

    Fix: the "/xmpp" stream-filtering logic was duplicated four times across
    process_subscription/process_stream; it now lives in _mirrored_rooms.
    """
    def __init__(self, zulip_client):
        # type: (Client) -> None
        self.client = zulip_client
        self.jabber = None  # type: Optional[JabberToZulipBot]

    def set_jabber_client(self, client):
        # type: (JabberToZulipBot) -> None
        """Attach the Jabber side of the mirror (must be called before use)."""
        self.jabber = client

    def process_event(self, event):
        # type: (Dict[str, Any]) -> None
        """Dispatch one Zulip event to the appropriate handler."""
        if event['type'] == 'message':
            message = event["message"]
            # Only mirror this user's own messages
            if message['sender_email'] != self.client.email:
                return
            try:
                if message['type'] == 'stream':
                    self.stream_message(message)
                elif message['type'] == 'private':
                    self.private_message(message)
            except Exception:
                logging.exception("Exception forwarding Zulip => Jabber")
        elif event['type'] == 'subscription':
            self.process_subscription(event)
        elif event['type'] == 'stream':
            self.process_stream(event)

    def stream_message(self, msg):
        # type: (Dict[str, str]) -> None
        """Mirror a Zulip stream message into the matching MUC room."""
        assert(self.jabber is not None)
        stream = msg['display_recipient']
        if not stream.endswith("/xmpp"):
            return
        room = stream_to_room(stream)
        jabber_recipient = JID(local=room, domain=options.conference_domain)
        outgoing = self.jabber.make_message(
            mto = jabber_recipient,
            mbody = msg['content'],
            mtype = 'groupchat')
        # Tag mirrored traffic so the Jabber side does not mirror it back
        outgoing['thread'] = u'\u1FFFE'
        outgoing.send()

    def private_message(self, msg):
        # type: (Dict[str, Any]) -> None
        """Mirror a Zulip PM to each mirror-dummy recipient on Jabber."""
        assert(self.jabber is not None)
        for recipient in msg['display_recipient']:
            if recipient["email"] == self.client.email:
                continue
            if not recipient["is_mirror_dummy"]:
                continue
            recip_email = recipient['email']
            jabber_recipient = zulip_to_jid(recip_email, self.jabber.boundjid.domain)
            outgoing = self.jabber.make_message(
                mto = jabber_recipient,
                mbody = msg['content'],
                mtype = 'chat')
            # Tag mirrored traffic so the Jabber side does not mirror it back
            outgoing['thread'] = u'\u1FFFE'
            outgoing.send()

    @staticmethod
    def _mirrored_rooms(stream_dicts):
        # type: (List[Dict[str, Any]]) -> List[str]
        """Return MUC room names for the mirrored ("/xmpp") streams among
        stream_dicts (each dict must carry a 'name' key)."""
        streams = [s['name'].lower() for s in stream_dicts]
        return [stream_to_room(s) for s in streams if s.endswith("/xmpp")]

    def process_subscription(self, event):
        # type: (Dict[str, Any]) -> None
        """Join/leave MUC rooms as the user (un)subscribes on Zulip."""
        assert(self.jabber is not None)
        if event['op'] == 'add':
            for room in self._mirrored_rooms(event['subscriptions']):
                self.jabber.join_muc(room)
        if event['op'] == 'remove':
            for room in self._mirrored_rooms(event['subscriptions']):
                self.jabber.leave_muc(room)

    def process_stream(self, event):
        # type: (Dict[str, Any]) -> None
        """Join/leave MUC rooms as mirrored streams are occupied/vacated."""
        assert(self.jabber is not None)
        if event['op'] == 'occupy':
            for room in self._mirrored_rooms(event['streams']):
                self.jabber.join_muc(room)
        if event['op'] == 'vacate':
            for room in self._mirrored_rooms(event['streams']):
                self.jabber.leave_muc(room)
def get_rooms(zulipToJabber):
    # type: (ZulipToJabberBot) -> List[str]
    """Return the initial list of MUC rooms to mirror, derived from the
    Zulip streams (public mode) or the user's subscriptions (personal)."""
    def fetch(key, method):
        # type: (str, Callable[[], Dict[str, Any]]) -> Any
        ret = method()
        if ret.get("result") != "success":
            logging.error(str(ret))
            sys.exit("Could not get initial list of Zulip %s" % (key,))
        return ret[key]

    if options.mode == 'public':
        stream_infos = fetch("streams", zulipToJabber.client.get_streams)
    else:
        stream_infos = fetch("subscriptions", zulipToJabber.client.list_subscriptions)
    # Only "/xmpp"-suffixed streams are mirrored
    return [stream_to_room(info['name'])
            for info in stream_infos
            if info['name'].endswith("/xmpp")]
def config_error(msg):
    # type: (str) -> None
    """Print a configuration error to stderr and exit with status 2."""
    print(msg, file=sys.stderr)
    sys.exit(2)
if __name__ == '__main__':
    # ---- Command-line / config-file option parsing -------------------------
    parser = optparse.OptionParser(
        epilog='''Most general and Jabber configuration options may also be specified in the
zulip configuration file under the jabber_mirror section (exceptions are noted
in their help sections). Keys have the same name as options with hyphens
replaced with underscores. Zulip configuration options go in the api section,
as normal.'''.replace("\n", " ")
    )
    parser.add_option(
        '--mode',
        default=None,
        action='store',
        help='''Which mode to run in. Valid options are "personal" and "public". In
"personal" mode, the mirror uses an individual users' credentials and mirrors
all messages they send on Zulip to Jabber and all private Jabber messages to
Zulip. In "public" mode, the mirror uses the credentials for a dedicated mirror
user and mirrors messages sent to Jabber rooms to Zulip. Defaults to
"personal"'''.replace("\n", " "))
    parser.add_option(
        '--zulip-email-suffix',
        default=None,
        action='store',
        help='''Add the specified suffix to the local part of email addresses constructed
from JIDs and nicks before sending requests to the Zulip server, and remove the
suffix before sending requests to the Jabber server. For example, specifying
"+foo" will cause messages that are sent to the "bar" room by nickname "qux" to
be mirrored to the "bar/xmpp" stream in Zulip by user "qux+foo@example.com". This
option does not affect login credentials.'''.replace("\n", " "))
    parser.add_option('-d', '--debug',
                      help='set logging to DEBUG. Can not be set via config file.',
                      action='store_const',
                      dest='log_level',
                      const=logging.DEBUG,
                      default=logging.INFO)

    jabber_group = optparse.OptionGroup(parser, "Jabber configuration")
    jabber_group.add_option(
        '--jid',
        default=None,
        action='store',
        help="Your Jabber JID. If a resource is specified, "
        "it will be used as the nickname when joining MUCs. "
        "Specifying the nickname is mostly useful if you want "
        "to run the public mirror from a regular user instead of "
        "from a dedicated account.")
    jabber_group.add_option('--jabber-password',
                            default=None,
                            action='store',
                            help="Your Jabber password")
    jabber_group.add_option('--conference-domain',
                            default=None,
                            action='store',
                            help="Your Jabber conference domain (E.g. conference.jabber.example.com). "
                            "If not specifed, \"conference.\" will be prepended to your JID's domain.")
    jabber_group.add_option('--no-use-tls',
                            default=None,
                            action='store_true')
    jabber_group.add_option('--jabber-server-address',
                            default=None,
                            action='store',
                            help="The hostname of your Jabber server. This is only needed if "
                            "your server is missing SRV records")
    jabber_group.add_option('--jabber-server-port',
                            default='5222',
                            action='store',
                            help="The port of your Jabber server. This is only needed if "
                            "your server is missing SRV records")

    parser.add_option_group(jabber_group)
    parser.add_option_group(zulip.generate_option_group(parser, "zulip-"))
    (options, args) = parser.parse_args()

    logging.basicConfig(level=options.log_level,
                        format='%(levelname)-8s %(message)s')

    # Resolve the config file: explicit flag > default location > error
    if options.zulip_config_file is None:
        default_config_file = zulip.get_default_config_filename()
        if default_config_file is not None:
            config_file = default_config_file
        else:
            config_error("Config file not found via --zulip-config_file or environment variable.")
    else:
        config_file = options.zulip_config_file

    config = SafeConfigParser()
    try:
        with open(config_file, 'r') as f:
            config.readfp(f, config_file)
    except IOError:
        pass

    # Fill in options not given on the command line from the [jabber_mirror]
    # section of the config file.
    for option in ("jid", "jabber_password", "conference_domain", "mode", "zulip_email_suffix",
                   "jabber_server_address", "jabber_server_port"):
        if (getattr(options, option) is None and
                config.has_option("jabber_mirror", option)):
            setattr(options, option, config.get("jabber_mirror", option))

    for option in ("no_use_tls",):
        if getattr(options, option) is None:
            if config.has_option("jabber_mirror", option):
                setattr(options, option, config.getboolean("jabber_mirror", option))
            else:
                setattr(options, option, False)

    if options.mode is None:
        options.mode = "personal"

    if options.zulip_email_suffix is None:
        options.zulip_email_suffix = ''

    if options.mode not in ('public', 'personal'):
        config_error("Bad value for --mode: must be one of 'public' or 'personal'")

    if None in (options.jid, options.jabber_password):
        config_error("You must specify your Jabber JID and Jabber password either "
                     "in the Zulip configuration file or on the commandline")

    zulipToJabber = ZulipToJabberBot(zulip.init_from_options(options, "JabberMirror/" + __version__))
    # This won't work for open realms that don't have a consistent domain
    options.zulip_domain = zulipToJabber.client.email.partition('@')[-1]

    try:
        jid = JID(options.jid)
    except InvalidJID as e:
        # Fix: Python 3 exceptions have no `.message` attribute; formatting
        # the exception itself gives the same text without an AttributeError.
        config_error("Bad JID: %s: %s" % (options.jid, e))

    if options.conference_domain is None:
        options.conference_domain = "conference.%s" % (jid.domain,)

    xmpp = JabberToZulipBot(jid, options.jabber_password, get_rooms(zulipToJabber))

    address = None
    if options.jabber_server_address:
        address = (options.jabber_server_address, options.jabber_server_port)

    if not xmpp.connect(use_tls=not options.no_use_tls, address=address):
        sys.exit("Unable to connect to Jabber server")

    xmpp.set_zulip_client(zulipToJabber)
    zulipToJabber.set_jabber_client(xmpp)

    xmpp.process(block=False)
    if options.mode == 'public':
        event_types = ['stream']
    else:
        event_types = ['message', 'subscription']

    try:
        logging.info("Connecting to Zulip.")
        zulipToJabber.client.call_on_each_event(zulipToJabber.process_event,
                                                event_types=event_types)
    except BaseException:
        # Fix: the bound exception variable was unused; logging.exception
        # records the active traceback on its own.
        logging.exception("Exception in main loop")
        xmpp.abort()
        sys.exit(1)
5c901f90e72841f76661eedc6ff417824b9d323a | 1,917 | py | Python | tests/meltano/api/controllers/test_repos.py | siilats/meltano | 404605c83f441c3fc2b729e26416c6caa8b0ed0b | [
"MIT"
] | 122 | 2021-06-21T17:30:29.000Z | 2022-03-25T06:21:38.000Z | tests/meltano/api/controllers/test_repos.py | siilats/meltano | 404605c83f441c3fc2b729e26416c6caa8b0ed0b | [
"MIT"
] | null | null | null | tests/meltano/api/controllers/test_repos.py | siilats/meltano | 404605c83f441c3fc2b729e26416c6caa8b0ed0b | [
"MIT"
] | 21 | 2021-06-22T10:08:15.000Z | 2022-03-18T08:57:02.000Z | import json
import pytest
from flask import url_for
def assert_has_items(entry, count):
    """Assert that *entry* holds exactly *count* elements under its "items" key.

    Bug fixed: the original returned the comparison result instead of
    asserting it, and every call site in TestRepos discarded that boolean --
    so a wrong item count could never fail a test.  Asserting here makes the
    checks effective while keeping the call sites unchanged.

    :param entry: mapping with an "items" sequence
    :param count: expected number of items
    :raises AssertionError: when the length differs from *count*
    """
    items = entry["items"]
    assert len(items) == count, f"expected {count} items, got {len(items)}"
@pytest.mark.usefixtures("add_model", "seed_users")
class TestRepos:
    """API tests for the ``repos`` blueprint endpoints."""

    def test_index(self, api, app):
        """repos.index reports the expected item count for every category."""
        with app.test_request_context():
            res = api.get(url_for("repos.index"))
            payload = res.json
            # Bug fixed: the old assert_has_items() helper returned a boolean
            # that was silently discarded here, so a wrong count never failed
            # the test.  Assert directly on the payload instead.
            assert len(payload["tables"]["items"]) == 3
            assert len(payload["topics"]["items"]) == 1
            assert len(payload["dashboards"]["items"]) == 0
            assert len(payload["documents"]["items"]) == 1

    def test_model_index(self, api, app):
        """repos.model_index lists every topic with its required fields."""
        with app.test_request_context():
            res = api.get(url_for("repos.model_index"))
            payload = res.json
            # we have topics
            topic_identifiers = payload.keys()
            assert topic_identifiers
            assert "model-carbon-intensity/carbon" in topic_identifiers
            assert "model-gitlab/gitlab" in topic_identifiers
            # each topic has a name, a namespace and designs
            for topic_def in payload.values():
                assert topic_def["namespace"]
                assert topic_def["name"]
                assert topic_def["designs"]

    def test_model_design(self, api, app):
        """repos.model_design returns a complete design description."""
        with app.test_request_context():
            res = api.get(
                url_for(
                    "repos.model_design",
                    namespace="model-carbon-intensity",
                    topic_name="carbon",
                    design_name="region",
                )
            )
            json_data = json.loads(res.data)
            assert "description" in json_data
            assert "from" in json_data
            assert "graph" in json_data
            assert "joins" in json_data
            assert "label" in json_data
            assert "name" in json_data
            assert "related_table" in json_data
            assert json_data["from"] == "region"
            assert json_data["name"] == "region"
| 29.492308 | 67 | 0.598852 |
de75c0cccb6012e4c804f7f982768a68b6afe5b1 | 282 | py | Python | abc/abc140/abc140b.py | c-yan/atcoder | 940e49d576e6a2d734288fadaf368e486480a948 | [
"MIT"
] | 1 | 2019-08-21T00:49:34.000Z | 2019-08-21T00:49:34.000Z | abc/abc140/abc140b.py | c-yan/atcoder | 940e49d576e6a2d734288fadaf368e486480a948 | [
"MIT"
] | null | null | null | abc/abc140/abc140b.py | c-yan/atcoder | 940e49d576e6a2d734288fadaf368e486480a948 | [
"MIT"
# AtCoder ABC140 B.
# N dishes are eaten in the order given by A (1-based in the input, converted
# to 0-based here).  Eating dish i yields B[i] satisfaction, and eating dish
# i+1 immediately after dish i yields the bonus C[i].


def solve(n, dishes, base_points, bonus_points):
    """Return the total satisfaction for eating *dishes* in the given order.

    :param n: number of dishes
    :param dishes: 0-based dish indices in eating order (a permutation)
    :param base_points: base_points[i] is the satisfaction for eating dish i
    :param bonus_points: bonus_points[i] is the bonus for eating dish i+1
        right after dish i (n - 1 entries)
    """
    total = 0
    previous = n  # sentinel: previous + 1 == n + 1 never matches a 0-based index
    for dish in dishes:
        total += base_points[dish]
        if previous + 1 == dish:
            total += bonus_points[previous]
        previous = dish
    return total


def main():
    """Read the four input lines from stdin and print the answer."""
    n = int(input())
    dishes = [int(e) - 1 for e in input().split()]
    base_points = list(map(int, input().split()))
    bonus_points = list(map(int, input().split()))
    print(solve(n, dishes, base_points, bonus_points))


# Guarding the I/O lets the pure solve() function be imported and tested
# without blocking on stdin; running the file as a script is unchanged.
if __name__ == "__main__":
    main()
| 20.142857 | 41 | 0.535461 |
0f6dcb1350c691c65fe36fc4df05ace275c8e099 | 520 | py | Python | idb/ipc/crash_delete.py | sergey-plevako-badoo/FBSimulatorControl | 117af8508ba7405bdbacd29ec95a0523b3926ad3 | [
"MIT"
] | 1 | 2019-06-12T16:46:25.000Z | 2019-06-12T16:46:25.000Z | idb/ipc/crash_delete.py | BalestraPatrick/idb | 9deac2af129e7595c303c121944034c556202454 | [
"MIT"
] | null | null | null | idb/ipc/crash_delete.py | BalestraPatrick/idb | 9deac2af129e7595c303c121944034c556202454 | [
"MIT"
] | 1 | 2021-08-20T08:04:16.000Z | 2021-08-20T08:04:16.000Z | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
from typing import List
from idb.common.types import CrashLogInfo, CrashLogQuery
from idb.grpc.types import CompanionClient
from idb.ipc.mapping.crash import _to_crash_log_info_list, _to_crash_log_query_proto
async def client(client: CompanionClient, query: CrashLogQuery) -> List[CrashLogInfo]:
    """Delete crash logs matching *query* via the companion gRPC stub.

    The query is converted to its protobuf form, sent to the companion's
    ``crash_delete`` RPC, and the response is mapped back into a list of
    ``CrashLogInfo`` objects describing the deleted logs.
    """
    query_proto = _to_crash_log_query_proto(query)
    delete_response = await client.stub.crash_delete(query_proto)
    return _to_crash_log_info_list(delete_response)
| 37.142857 | 86 | 0.813462 |
77b881b3375fb28268330303ec7e3a5f94c04e90 | 963 | py | Python | azure-mgmt-network/azure/mgmt/network/v2018_07_01/models/gateway_route_list_result_py3.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 1 | 2021-09-07T18:36:04.000Z | 2021-09-07T18:36:04.000Z | azure-mgmt-network/azure/mgmt/network/v2018_07_01/models/gateway_route_list_result_py3.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 2 | 2019-10-02T23:37:38.000Z | 2020-10-02T01:17:31.000Z | azure-mgmt-network/azure/mgmt/network/v2018_07_01/models/gateway_route_list_result_py3.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 1 | 2018-08-28T14:36:47.000Z | 2018-08-28T14:36:47.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class GatewayRouteListResult(Model):
    """A collection of virtual network gateway routes.

    :param value: List of gateway routes
    :type value: list[~azure.mgmt.network.v2018_07_01.models.GatewayRoute]
    """

    # msrest (de)serialization map: JSON key 'value' holds a list of
    # GatewayRoute objects.
    _attribute_map = {
        'value': {'key': 'value', 'type': '[GatewayRoute]'},
    }

    def __init__(self, *, value=None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.value = value
| 33.206897 | 76 | 0.589823 |
1d48687873c6bfed350fdd12ee20de746c8dfa3a | 23,514 | py | Python | onu.py | jfcsantos/python-threads | b2b96e556b987400a4dcc5d59249065d0f865557 | [
"MIT"
] | null | null | null | onu.py | jfcsantos/python-threads | b2b96e556b987400a4dcc5d59249065d0f865557 | [
"MIT"
] | null | null | null | onu.py | jfcsantos/python-threads | b2b96e556b987400a4dcc5d59249065d0f865557 | [
"MIT"
] | null | null | null | from datetime import datetime
import time
import sys
import threading
import json
import requests
from BeautifulSoup import BeautifulSoup
from pprint import pprint
from inspect import currentframe, getframeinfo
import onu_logging
class jobThread (threading.Thread):
    """Worker thread that runs one overnight-update job (or sub-job).

    The thread calls the job's HTTP endpoint through run_job(), retries on
    timeouts / connection errors, then fans out: threadable jobs spawn
    sub-job threads (run_multiple_sub_jobs) and top-level jobs schedule the
    jobs that depend on them (create_new_threads).  Coordination happens
    through module globals: globalConnCounter (active-thread budget),
    stopThreads (global abort flag), finishedJobs (jobID -> result code,
    where "s" marks success) and depThreadList (queued/running jobs).
    """
    def __init__(self, threadLevel, parent, job, jobID, subJobID = None):
        """Store the job description for later execution in run().

        :param threadLevel: nesting depth, used to cap sub-job parallelism
        :param parent: parent jobThread, 0 for root jobs, -1 for sub-jobs
        :param job: dict with at least "url" and "threadable" keys
        :param jobID: numeric job id (-1 for sub-jobs)
        :param subJobID: sub-job URL, used as the display id for sub-jobs
        """
        threading.Thread.__init__(self)
        self.job = job
        self.jobID = jobID
        self.parent = parent
        self.subJobID = subJobID
        self.threadLevel = threadLevel #set the threading level to handle thread number limitations
    def run(self):
        """Wait for parent jobs, call the job endpoint, then fan out sub-jobs and dependents."""
        #Log the starting time of the thread
        self.timeStart = datetime.now()
        #NOTE(review): runSingleJob is declared global but never assigned or read here
        global globalConnCounter, runSingleJob
        jobID = self.jobID
        subJobID = self.subJobID
        if not subJobID is None:
            jobID = self.subJobID
        #Use this var to decide if child thread can run according to parent statuses
        threadCanRun = True
        #Verify if this job was called by another job
        if self.parent > 0 and not stopThreads:
            try:
                #Check for multiple dependencies
                #NOTE(review): if this raises, depArr is unbound and the loop below
                #will raise NameError inside this thread
                depArr = check_multiple_dependencies(jobID)
            except Exception as e:
                frameinfo = getframeinfo(currentframe())
                onu_logging.logErrorMessage("Could not load dependencies : %s" % e, frameinfo.lineno)
            #Wait for the parent job thread to finish
            self.parent.join()
            #Go through each parent job
            for parID in depArr:
                #If the parent hasn't finished yet we wait until it does so the child can proceed
                if int(parID) not in finishedJobs:
                    onu_logging.logMessage("Job "+str(jobID) + " waiting for parent: "+ str(parID) +" to finish")
                    #Busy-wait: polls the shared finishedJobs dict until the parent reports a result
                    while int(parID) not in finishedJobs and not stopThreads:
                        pass
                #If the parent has finished but without success we don't allow the child to proceed
                if int(parID) in finishedJobs and (finishedJobs[int(parID)] != "s"):
                    threadCanRun = False
        if threadCanRun and not stopThreads:
            onu_logging.logMessage("Starting job " + str(jobID))
            onu_logging.logMessage(str(datetime.now()))
            timeoutCounter = 3
            jobResult = None
            job_res = None
            #Run job and catch any exception. Retry 3 times and then exit
            #for timeoutCounter in range(1,3):
            while True:
                try:
                    #pprint(self.job)
                    job_res = run_job(self.job["url"])
                    jobResult = json.loads(job_res.text)
                    break
                except requests.exceptions.Timeout as e:
                    if timeoutCounter > 0 :
                        timeoutCounter = timeoutCounter - 1
                        frameinfo = getframeinfo(currentframe())
                        onu_logging.logErrorMessage("Response timed out, retrying... : %s" % e, frameinfo.lineno)
                    else:
                        frameinfo = getframeinfo(currentframe())
                        onu_logging.logErrorMessage("Retry limit, aborting main job", frameinfo.lineno)
                        break
                except (requests.exceptions.ConnectionError, requests.exceptions.RequestException) as e: #Handles connection error
                    if timeoutCounter > 0 :
                        timeoutCounter = timeoutCounter - 1
                        frameinfo = getframeinfo(currentframe())
                        onu_logging.logErrorMessage("Connection refused, retrying... : %s" % e, frameinfo.lineno)
                        time.sleep(10)
                        #A 403 in the error text is taken to mean the session expired:
                        #re-authenticate before the next retry
                        if "403" in str(e):
                            frameinfo = getframeinfo(currentframe())
                            onu_logging.logErrorMessage("Session lost, trying to create a new one..." , frameinfo.lineno)
                            authResp = auth_admin_user()
                    else:
                        onu_logging.logMessage("Retry limit, aborting main job")
                        break
                except Exception as e:
                    #Typically a JSON decode failure; log the job and whatever was parsed
                    frameinfo = getframeinfo(currentframe())
                    onu_logging.logErrorMessage("Could not load response : %s" % e , frameinfo.lineno)
                    onu_logging.logJsonMessage("JOB %s:\n" % jobID, self.job)
                    onu_logging.logJsonMessage("",jobResult)
                    break
            if ( (job_res and jobResult != None) and job_res.status_code == requests.codes.ok):
                #jobResult = json.loads(job_res.text)
                if 'result' in jobResult:
                    onu_logging.logMessage("RESULT : "+ str(jobID) + ": " + jobResult['result'])
                    if jobResult['result'] == 'e':
                        frameinfo = getframeinfo(currentframe())
                        onu_logging.logErrorMessage(jobResult['msg'], frameinfo.lineno)
                isThreadable = int(self.job["threadable"])
                #If job is threadable we're gonna have an array of ID's in the result to create subJobs
                #Additionally, if we need extra threading (cacheId, etc) the field "threadable" will be in the array as well
                if isThreadable == 1 and not stopThreads:
                    if 'threadable' in jobResult:
                        subJobsThreadable = jobResult['threadable']
                    else:
                        subJobsThreadable = '0'
                    run_multiple_sub_jobs(self.job["url"], jobResult['subJobs'], subJobsThreadable, self.threadLevel)
                #create threads for the jobs dependent on this job
                # parent = -1 means these are sub jobs
                #if parent is > -1:
                #    create_new_threads(self,jobDep)
                if jobID > 0 and not stopThreads:
                    if jobID in depThreadList:
                        onu_logging.logMessage("Job "+ str(jobID) + " was removed from the dependent list")
                        del depThreadList[jobID]
                    if 'result' in jobResult:
                        jobResult = jobResult['result']
                    #Publish the result code so children polling finishedJobs can proceed
                    finishedJobs[jobID] = jobResult
                    dependencies = None
                    #Sub-jobs (subJobID set) never schedule dependents themselves
                    if subJobID is None:
                        timeoutCounter = 3
                        #for timeoutCounter in range(1,3):
                        while True:
                            try:
                                dependencies = fetch_dependencies(jobID)
                                break
                            except requests.exceptions.Timeout as e:
                                if timeoutCounter > 0 :
                                    timeoutCounter = timeoutCounter - 1
                                    frameinfo = getframeinfo(currentframe())
                                    onu_logging.logErrorMessage("Response timed out, retrying... : %s" % e, frameinfo.lineno)
                                else:
                                    onu_logging.logMessage("Retry limit, aborting main job")
                                    break
                            except (requests.exceptions.ConnectionError, requests.exceptions.RequestException) as e: #Handles connection error
                                if timeoutCounter > 0 :
                                    timeoutCounter = timeoutCounter - 1
                                    frameinfo = getframeinfo(currentframe())
                                    onu_logging.logErrorMessage("Connection refused, retrying... : %s" % e, frameinfo.lineno)
                                    time.sleep(10)
                                    if "403" in str(e):
                                        frameinfo = getframeinfo(currentframe())
                                        onu_logging.logErrorMessage("Session lost, trying to create a new one..." , frameinfo.lineno)
                                        authResp = auth_admin_user()
                                else:
                                    onu_logging.logMessage("Retry limit, aborting main job")
                                    break
                            except Exception as e:
                                frameinfo = getframeinfo(currentframe())
                                onu_logging.logErrorMessage("Could not fetch dependencies : %s" % e, frameinfo.lineno)
                                break
                        if not dependencies is None:
                            create_new_threads(self.threadLevel, self,dependencies)
            else:
                onu_logging.logMessage("Job "+str(jobID) + " failed")
                finishedJobs[jobID] = 'f'
        else:
            onu_logging.logMessage("Child thread cannot run because parent hasn't finished successfully")
        onu_logging.logMessage("Exiting job " + str(jobID))
        #Release this thread's slot in the global connection budget
        globalConnCounter = globalConnCounter - 1
        onu_logging.logMessage("Number of global threads: {0}".format(globalConnCounter))
        self.timeEnd = datetime.now()
        tdelta = self.timeEnd - self.timeStart
        seconds = tdelta.total_seconds()
        #if isinstance(jobID, int):
        #print "Job "+ str(jobID) + " took "+ datetime.fromtimestamp(int(seconds)).strftime('%H:%M:%S')
        #else:
        message = "Job "+ str(jobID) + " took "+ str(seconds) +"\n"
        message += "ended at: " + str(datetime.now())
        onu_logging.logMessage(message)
###################START run_job########################
#Run the cron job by calling the url
def run_job(url):
    """Trigger a single cron-job endpoint and return the HTTP response.

    :param url: path fragment appended to the global serverBaseURL
    :return: the requests Response object
    :raises requests.HTTPError: on a 4xx/5xx answer (via raise_for_status)
    """
    job_url = serverBaseURL + str(url)
    response = session.get(job_url, stream=False, verify=False, timeout=1800)
    response.raise_for_status()
    return response
###################END run_job########################
###################START run_multiple_sub_jobs########################
def run_multiple_sub_jobs(url,subJobs,threadable,threadLevel):
    """Spawn one jobThread per sub-job id in *subJobs* and wait for them all.

    :param url: base job URL; each sub-job calls url + "/" + id (or
        url + "/" + key + "/" + id for nested list values)
    :param subJobs: JSON-encoded list, or dict mapping key -> id or list of ids
    :param threadable: forwarded as the sub-job's "threadable" field so a
        sub-job may itself fan out further
    :param threadLevel: current nesting depth; deeper levels are throttled by
        joining threads immediately instead of running them in parallel

    NOTE(review): this function is Python 2 only (iteritems, basestring), and
    if subJobs decodes to neither a list nor a dict, subJobsIter is unbound.
    """
    subThreads = []
    global globalConnCounter
    subJobs = json.loads(subJobs)
    if isinstance(subJobs,(list)):
        subJobsIter = enumerate(subJobs)
    elif isinstance(subJobs,(dict)):
        subJobsIter = subJobs.iteritems()
    subJobsThreadCounter = 0
    for key, firstSubId in subJobsIter:
        #firstSubId should never be empty
        if firstSubId:
            #if the value is a container, iterate its secondSubId entries
            if not isinstance(subJobs[key], (basestring,int)):
                for secondSubId in subJobs[key]:
                    #Busy-wait loop: other threads grab open slots immediately, so keep
                    #retrying until this sub-job wins a slot (we must not drop any job)
                    while True:
                        if globalConnCounter < globalConn or threadLevel > 1:
                            job = {"url":url+"/"+key+"/"+secondSubId, "threadable":threadable}
                            thread = jobThread(threadLevel + 1, -1, job,-1, url+"/"+key+"/"+secondSubId)
                            thread.daemon = True
                            globalConnCounter = globalConnCounter + 1
                            thread.start()
                            #In order to limit the lower threading level to 1 we wait for the created thread to finish with join()
                            if threadLevel > 1:
                                thread.join()
                            else: #if not we just add to the subThreads list and wait later
                                subThreads.append(thread)
                            break
                        else :
                            #no free connection slot yet; spin and retry
                            pass
            else:
                while True:
                    if globalConnCounter < globalConn or threadLevel > 1:
                        job = {"url":url+"/"+firstSubId, "threadable":threadable}
                        thread = jobThread(threadLevel + 1, -1, job,-1, url+"/"+firstSubId)
                        thread.daemon = True
                        globalConnCounter = globalConnCounter + 1
                        thread.start()
                        #for the first level of subThreads
                        if threadLevel < 4:
                            subThreads.append(thread)
                        if threadLevel > 3:
                            #probably a bad way to handle this but:
                            #if we have more than 1 subjobs at level 2, by getting here we started the 3rd one so we wait.
                            if subJobsThreadCounter > 1:
                                thread.join()
                                subJobsThreadCounter -= 1
                            else:
                                subJobsThreadCounter += 1
                                subThreads.append(thread)
                        break
                    else :
                        #no free connection slot yet; spin and retry
                        pass
    #Block until every sub-job started above (and not already joined) finishes
    for t in subThreads:
        t.join()
###################END run_multiple_sub_jobs########################
###################START create_new_threads########################
#Creates new threads from the list of jobs
def create_new_threads(threadLevel,parent,jobs):
    """Spawn a daemon jobThread for every enabled job in *jobs*.

    :param threadLevel: nesting depth forwarded to each new jobThread
    :param parent: the jobThread the new jobs depend on (0 for root jobs)
    :param jobs: dict of jobID -> job description ("url", "threadable",
        "enabled" keys are read here and in jobThread.run)
    """
    global globalConnCounter
    for (jobID, job) in jobs.items():
        jobID = int(jobID)
        thread = jobThread(threadLevel, parent, job, jobID)
        thread.daemon = True
        if int(job["enabled"]) == 1:
            #In order NOT to overload the server with requests we limit the number
            #of active threads (busy-wait until a connection slot frees up)
            while globalConnCounter > globalConn - 1:
                pass
            #For multiple dependencies, check if new job wasn't or isn't being processed already
            if jobID not in finishedJobs and jobID not in depThreadList:
                #NOTE(review): "globalConnCounter is globalConn" relies on CPython
                #small-int caching for identity; "!=" is what is meant here
                if not globalConnCounter is globalConn:
                    depThreadList[jobID] = thread
                    globalConnCounter = globalConnCounter + 1
                    thread.start()
###################END create_new_threads########################
###################START fetch_dependencies########################
#Fetch the dependencies of job: jobID
def fetch_dependencies(jobID):
    """Fetch the jobs that depend on *jobID* from the scheduler endpoint.

    A jobID of -1 maps to the bare ``/dependency/`` URL, which returns all
    jobs that have no dependencies at all.

    Bug fixed: the original tested ``jobID is -1``.  Identity comparison on
    an int only works because CPython caches small integers -- it is an
    implementation detail, not guaranteed equality; ``==`` is correct.

    :param jobID: numeric job id, or -1 for the root (no-dependency) jobs
    :return: decoded JSON structure describing the dependent jobs
    :raises requests.HTTPError: if the server answers with an error status
    """
    onu_logging.logMessage("Fetching jobs dependent on job " + str(jobID))
    if jobID == -1:
        jobID = ""
    #Create the full url to be called:
    # - /dependency retrieves all the jobs with no dependencies
    # - /dependency/jobID/ gives the jobs which depend on jobID
    fullURL = serverBaseURL + "/dependency/" + str(jobID)
    #Get the dependencies through the session; raise on HTTP errors
    depResponse = session.get(fullURL, verify = False, timeout=1800)
    depResponse.raise_for_status()
    #Decode the response body into a JSON object
    dependencies = json.loads(depResponse.text)
    message = "Job " + str(jobID) + " dependencies: "
    onu_logging.logJsonMessage(message, dependencies)
    return dependencies
###################END fetch_dependencies########################
###################START check_multiple_dependencies########################
#Check if a job depends on multiple other jobs and returns the list
def check_multiple_dependencies(jobID):
    """Return the ids of the jobs that *jobID* depends on.

    Queries the /dependee/<jobID> endpoint and decodes its JSON payload.

    :raises requests.HTTPError: on a 4xx/5xx answer
    """
    dependee_url = "{0}/dependee/{1}".format(serverBaseURL, jobID)
    response = session.get(dependee_url, verify=False, timeout=1800)
    response.raise_for_status()
    return json.loads(response.text)
###################END check_multiple_dependencies########################
###################END fetch single job########################
def fetch_job(jobID):
    """Fetch the definition of a single job from the /job/<jobID> endpoint.

    :return: decoded JSON job description
    :raises requests.HTTPError: on a 4xx/5xx answer
    """
    job_url = "{0}/job/{1}".format(serverBaseURL, jobID)
    response = session.get(job_url, verify=False, timeout=1800)
    response.raise_for_status()
    return json.loads(response.text)
###################END check_multiple_dependencies########################
###################START clean_logs########################
def clean_logs():
    """Ask the server to clean overnight-update watchdog logs.

    :return: the raw requests Response object
    :raises requests.HTTPError: on a 4xx/5xx answer
    """
    response = session.get(serverBaseURL + "/clean-logs", verify=False, timeout=1800)
    response.raise_for_status()
    return response
###################START clean_logs########################
###################START auth_admin_user########################
#Authenticates admin user session to access overnight updates urls
def auth_admin_user():
    """Log the admin user in so the session can access the update URLs.

    Posts the Drupal user_login form with the module-level credentials.  A
    failed login is NOT detected here -- the caller inspects the returned
    page -- which is also why raise_for_status() is deliberately not called.
    """
    global adminUser, adminPwd
    login_url = serverBaseURL + '/user/login'
    #Field names and order match the Drupal user/login form
    login_form = {
        'name': adminUser,
        'pass': adminPwd,
        'form_id': "user_login",
    }
    print(login_url)
    return session.post(login_url, data=login_form, verify=False, timeout=1800)
###################END auth_admin_user########################
###################START set_base_url########################
#Authenticates admin user session to access overnight updates urls
def set_base_url(environment):
    """Map an environment name to its base URL and set the admin password.

    "prod" and "dev" select their hard-coded deployments (values sanitized to
    empty strings in this copy); any other name is used verbatim as the base
    URL.  The global adminPwd is (re)set as a side effect.

    :param environment: environment name or literal base URL
    :return: the base URL to use for all subsequent requests
    """
    global adminPwd
    adminPwd = ""
    if environment in ("prod", "dev"):
        return ""
    return str(environment)
###################END set_base_url########################
def stopProcesses():
    """Request all worker threads to stop and wait until the main run finishes.

    Sets the global stopThreads flag (polled by every jobThread busy-wait),
    then waits for startOnu() to raise the ``finished`` flag.

    Bug fixed: the original polled with a bare ``pass`` loop, pinning a CPU
    core at 100% for the whole shutdown; sleeping briefly between polls keeps
    the wait cheap without changing the semantics.
    """
    global stopThreads
    stopThreads = True
    while not finished:
        time.sleep(0.1)
stopThreads = False  #set True (by stopProcesses) to ask every worker loop to bail out
finished = False  #set True by startOnu once all jobThread workers have exited
serverBaseURL = ""  #filled in by set_base_url() at startup
adminUser = ""
adminPwd = ""
globalConnCounter = 0  #number of currently running jobThread workers
globalConn = 13 #Limits the number of overall connections
session = requests.session() #Session object used for all requests
#These two dicts are used to manage multiple dependencies, they wouldn't be necessary if we didn't have them
depThreadList = {}  #jobID -> thread, for jobs queued or running (removed when done)
finishedJobs = {}  #jobID -> result code ("s" is treated as success by jobThread.run)
bufferLog = ""
#To run: overnightupdates environmentName jobId
# No jobId runs all
#if __name__ == "__main__":
def startOnu(environment, jobID):
    """Entry point: authenticate, then run all jobs (or one specific job).

    :param environment: "prod", "dev" or a literal base URL (see set_base_url)
    :param jobID: -1 to run every job starting from the dependency roots,
        otherwise the id of the single job to run

    Fixes applied:
    * ``globalConnCounter`` added to the ``global`` statement -- the
      single-job branch assigns it, which previously made it function-local
      and raised UnboundLocalError (silently swallowed by the surrounding
      ``except``), so single-job runs never actually started.
    * ``jobID is -1`` replaced by ``jobID == -1`` (twice) -- identity
      comparison on ints relies on CPython small-integer caching.
    """
    global bufferLog, serverBaseURL, finished, globalConnCounter
    timeStart = datetime.now()
    serverBaseURL = set_base_url(environment)
    #Handle critical connection errors: 4xx, 5xx
    #Failed auth is verified manually in the response
    try:
        authResp = auth_admin_user()
    except Exception as e: #Takes care of all the other exceptions
        frameinfo = getframeinfo(currentframe())
        onu_logging.logErrorMessage("Admin authentication: failed, aborting Main thread! : %s" % e, frameinfo.lineno)
        exit()
    #Search the login response page to see if the login was successful:
    #with Drupal a failed login keeps the 'not-logged-in' class on <body>
    soup = BeautifulSoup(authResp.text) #Fetch the response page HTML
    loginFailed = soup.find("body", "not-logged-in") #Login failed if the page has the 'not-logged-in' class
    #Continue the process if the user is logged in
    if not loginFailed:
        onu_logging.logMessage("Admin authentication: success")
        try:
            clean_logs() #Clean watchdog logs related to overnight updates
            onu_logging.logMessage("Cleaned old logs")
        except Exception as e:
            frameinfo = getframeinfo(currentframe())
            onu_logging.logErrorMessage("Couldn't clean old logs...continuing anyway : %s" % e, frameinfo.lineno)
        dependencies = []
        timeoutCounter = 3
        if jobID == -1:
            #Full run: fetch the dependency roots, retrying on transient errors
            while True:
                try:
                    dependencies = fetch_dependencies(jobID)
                    break
                except requests.exceptions.Timeout as e:
                    if timeoutCounter > 0 :
                        timeoutCounter = timeoutCounter - 1
                        frameinfo = getframeinfo(currentframe())
                        onu_logging.logErrorMessage("Response timed out, retrying : %s" % e, frameinfo.lineno)
                    else:
                        onu_logging.logMessage("Retry limit, aborting main job")
                        break
                except (requests.exceptions.ConnectionError, requests.exceptions.RequestException) as e: #Handles connection error
                    if timeoutCounter > 0 :
                        timeoutCounter = timeoutCounter - 1
                        frameinfo = getframeinfo(currentframe())
                        onu_logging.logErrorMessage("Connection refused, retrying : %s" % e, frameinfo.lineno)
                        time.sleep(10)
                        if "403" in str(e):
                            onu_logging.logMessage("Session lost, trying to create a new one...")
                            authResp = auth_admin_user()
                    else:
                        onu_logging.logMessage("Retry limit, aborting main job")
                        break
                except Exception as e: #Takes care of all the other exceptions
                    frameinfo = getframeinfo(currentframe())
                    onu_logging.logErrorMessage("Could not load response : %s" % e, frameinfo.lineno)
                    break
        else:
            #Single-job run: fetch and start just this job
            try:
                job = fetch_job(jobID)
                if int(job["enabled"]) == 1:
                    thread = jobThread(1,0, job, jobID)
                    thread.daemon = True
                    depThreadList[jobID] = thread
                    globalConnCounter = globalConnCounter + 1
                    thread.start()
            except Exception as e:
                frameinfo = getframeinfo(currentframe())
                onu_logging.logErrorMessage("Couldn't fetch job : %s" % e, frameinfo.lineno)
        if len(dependencies) > 0 and jobID == -1:
            onu_logging.logMessage("Creating main jobs threads.\n")
            create_new_threads(1, 0,dependencies)
        #Do not terminate program until the only active thread is Main
        while threading.active_count() > 1:
            terminate = True
            for thread in threading.enumerate():
                if(thread.__class__.__name__ == 'jobThread'):
                    terminate = False
            if terminate:
                finished = True
                break
            pass
    else:
        onu_logging.logMessage("Admin authentication failed, aborting execution!")
    timeEnd = datetime.now()
    tdelta = timeEnd - timeStart
    seconds = tdelta.total_seconds()
    onu_logging.logMessage("Main thread ended at " + str(datetime.now()))
    onu_logging.logMessage("Main thread finished...exiting")
if __name__ == "__main__":
    #Command line usage (Python 2 -- note the print statement):
    #  python onu.py <environment> [jobID]
    args = sys.argv
    if len(args) > 1:
        environment = str(args[1])
    else:
        print "Environment name not entered, exiting..."
        exit()
    jobID = -1 #If no arguments are passed we fetch all the jobs with no dependencies
    if len(args) > 2:
        jobID = int(args[2]) #If script is called with arguments means we're calling a specific job
startOnu(environment, jobID) | 42.21544 | 146 | 0.546355 |
06c1f29b631c3d5e7c1a0679d9060e4e4b86ca82 | 4,389 | py | Python | torchvision/prototype/models/quantization/shufflenetv2.py | ethanwhite/vision | 348f75ceb5b971dda7a2695c285bd5f8d4277069 | [
"BSD-3-Clause"
] | null | null | null | torchvision/prototype/models/quantization/shufflenetv2.py | ethanwhite/vision | 348f75ceb5b971dda7a2695c285bd5f8d4277069 | [
"BSD-3-Clause"
] | null | null | null | torchvision/prototype/models/quantization/shufflenetv2.py | ethanwhite/vision | 348f75ceb5b971dda7a2695c285bd5f8d4277069 | [
"BSD-3-Clause"
] | null | null | null | import warnings
from functools import partial
from typing import Any, List, Optional, Union
from torchvision.prototype.transforms import ImageNetEval
from torchvision.transforms.functional import InterpolationMode
from ....models.quantization.shufflenetv2 import (
QuantizableShuffleNetV2,
_replace_relu,
quantize_model,
)
from .._api import Weights, WeightEntry
from .._meta import _IMAGENET_CATEGORIES
from ..shufflenetv2 import ShuffleNetV2_x0_5Weights, ShuffleNetV2_x1_0Weights
__all__ = [
"QuantizableShuffleNetV2",
"QuantizedShuffleNetV2_x0_5Weights",
"QuantizedShuffleNetV2_x1_0Weights",
"shufflenet_v2_x0_5",
"shufflenet_v2_x1_0",
]
def _shufflenetv2(
    stages_repeats: List[int],
    stages_out_channels: List[int],
    weights: Optional[Weights],
    progress: bool,
    quantize: bool,
    **kwargs: Any,
) -> QuantizableShuffleNetV2:
    """Build a ShuffleNetV2 variant, optionally quantized and/or pretrained.

    When weights are given, num_classes (and the quantization backend, if
    recorded in the weight metadata) are derived from them; the state dict is
    loaded only after quantization so the keys match the quantized modules.
    """
    has_weights = weights is not None
    if has_weights:
        meta = weights.meta
        kwargs["num_classes"] = len(meta["categories"])
        if "backend" in meta:
            kwargs["backend"] = meta["backend"]
    backend = kwargs.pop("backend", "fbgemm")

    model = QuantizableShuffleNetV2(stages_repeats, stages_out_channels, **kwargs)
    _replace_relu(model)
    if quantize:
        quantize_model(model, backend)
    if has_weights:
        model.load_state_dict(weights.state_dict(progress=progress))
    return model
# Metadata fields shared by every quantized ShuffleNetV2 weight entry below;
# individual entries extend this dict with their accuracy numbers and a link
# back to the unquantized weights.
_common_meta = {
    "size": (224, 224),
    "categories": _IMAGENET_CATEGORIES,
    "interpolation": InterpolationMode.BILINEAR,
    "backend": "fbgemm",
    "quantization": "ptq",  # post-training quantization
    "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#post-training-quantized-models",
}
class QuantizedShuffleNetV2_x0_5Weights(Weights):
    """Pretrained quantized (fbgemm, post-training) weights for ShuffleNetV2 x0.5."""

    ImageNet1K_FBGEMM_Community = WeightEntry(
        url="https://download.pytorch.org/models/quantized/shufflenetv2_x0.5_fbgemm-00845098.pth",
        transforms=partial(ImageNetEval, crop_size=224),
        meta={
            **_common_meta,
            "unquantized": ShuffleNetV2_x0_5Weights.ImageNet1K_Community,
            "acc@1": 57.972,
            "acc@5": 79.780,
        },
    )
class QuantizedShuffleNetV2_x1_0Weights(Weights):
    """Pretrained quantized (fbgemm, post-training) weights for ShuffleNetV2 x1.0."""

    ImageNet1K_FBGEMM_Community = WeightEntry(
        url="https://download.pytorch.org/models/quantized/shufflenetv2_x1_fbgemm-db332c57.pth",
        transforms=partial(ImageNetEval, crop_size=224),
        meta={
            **_common_meta,
            "unquantized": ShuffleNetV2_x1_0Weights.ImageNet1K_Community,
            "acc@1": 68.360,
            "acc@5": 87.582,
        },
    )
def shufflenet_v2_x0_5(
    weights: Optional[Union[QuantizedShuffleNetV2_x0_5Weights, ShuffleNetV2_x0_5Weights]] = None,
    progress: bool = True,
    quantize: bool = False,
    **kwargs: Any,
) -> QuantizableShuffleNetV2:
    """Construct a ShuffleNetV2 x0.5 model, optionally quantized and pretrained."""
    if "pretrained" in kwargs:
        # Legacy boolean flag: translate it into the weights enum (or None).
        warnings.warn("The argument pretrained is deprecated, please use weights instead.")
        legacy_pretrained = kwargs.pop("pretrained")
        if legacy_pretrained:
            weights = (
                QuantizedShuffleNetV2_x0_5Weights.ImageNet1K_FBGEMM_Community
                if quantize
                else ShuffleNetV2_x0_5Weights.ImageNet1K_Community
            )
        else:
            weights = None

    weight_enum = QuantizedShuffleNetV2_x0_5Weights if quantize else ShuffleNetV2_x0_5Weights
    weights = weight_enum.verify(weights)

    return _shufflenetv2([4, 8, 4], [24, 48, 96, 192, 1024], weights, progress, quantize, **kwargs)
def shufflenet_v2_x1_0(
    weights: Optional[Union[QuantizedShuffleNetV2_x1_0Weights, ShuffleNetV2_x1_0Weights]] = None,
    progress: bool = True,
    quantize: bool = False,
    **kwargs: Any,
) -> QuantizableShuffleNetV2:
    """Construct a ShuffleNetV2 x1.0 model, optionally quantized and pretrained."""
    if "pretrained" in kwargs:
        # Legacy boolean flag: translate it into the weights enum (or None).
        warnings.warn("The argument pretrained is deprecated, please use weights instead.")
        legacy_pretrained = kwargs.pop("pretrained")
        if legacy_pretrained:
            weights = (
                QuantizedShuffleNetV2_x1_0Weights.ImageNet1K_FBGEMM_Community
                if quantize
                else ShuffleNetV2_x1_0Weights.ImageNet1K_Community
            )
        else:
            weights = None

    weight_enum = QuantizedShuffleNetV2_x1_0Weights if quantize else ShuffleNetV2_x1_0Weights
    weights = weight_enum.verify(weights)

    return _shufflenetv2([4, 8, 4], [24, 116, 232, 464, 1024], weights, progress, quantize, **kwargs)
| 32.272059 | 117 | 0.683299 |
628303b5c84182334e2499920420c16c758f076e | 113 | py | Python | Zadaniy/Examples/primer10.py | Dmitry-15/17_laba | adfa9fd9a10e1e8ed1479e0829987c3d86b14ca0 | [
"MIT"
] | null | null | null | Zadaniy/Examples/primer10.py | Dmitry-15/17_laba | adfa9fd9a10e1e8ed1479e0829987c3d86b14ca0 | [
"MIT"
] | null | null | null | Zadaniy/Examples/primer10.py | Dmitry-15/17_laba | adfa9fd9a10e1e8ed1479e0829987c3d86b14ca0 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
if __name__ == "__main__":
os.remove("file3.txt") | 14.125 | 26 | 0.60177 |
d87c06bf3d696d54702c5de3db638e8353dabed1 | 17,310 | py | Python | moler/connection_observer.py | nokia/moler | 13cb3d1329a8904ac074f269b8c9ec1955e3ae30 | [
"BSD-3-Clause"
] | 57 | 2018-02-20T08:16:47.000Z | 2022-03-28T10:36:57.000Z | moler/connection_observer.py | nokia/moler | 13cb3d1329a8904ac074f269b8c9ec1955e3ae30 | [
"BSD-3-Clause"
] | 377 | 2018-07-19T11:56:27.000Z | 2021-07-09T13:08:12.000Z | moler/connection_observer.py | nokia/moler | 13cb3d1329a8904ac074f269b8c9ec1955e3ae30 | [
"BSD-3-Clause"
] | 24 | 2018-04-14T20:49:40.000Z | 2022-03-29T10:44:26.000Z | # -*- coding: utf-8 -*-
__author__ = 'Grzegorz Latuszek, Marcin Usielski, Michal Ernst'
__copyright__ = 'Copyright (C) 2018-2021 Nokia'
__email__ = 'grzegorz.latuszek@nokia.com, marcin.usielski@nokia.com, michal.ernst@nokia.com'
import logging
import threading
import time
from abc import abstractmethod, ABCMeta
from six import add_metaclass
from moler.exceptions import ConnectionObserverNotStarted
from moler.exceptions import ConnectionObserverTimeout
from moler.exceptions import NoConnectionProvided
from moler.exceptions import NoResultSinceCancelCalled
from moler.exceptions import ResultAlreadySet
from moler.exceptions import ResultNotAvailableYet
from moler.exceptions import WrongUsage
from moler.helpers import ClassProperty
from moler.helpers import camel_case_to_lower_case_underscore
from moler.helpers import instance_id
from moler.helpers import copy_list
from moler.util.connection_observer import exception_stored_if_not_main_thread
from moler.util.loghelper import log_into_logger
from moler.runner_factory import get_runner
from moler.command_scheduler import CommandScheduler
from moler.util.connection_observer_life_status import ConnectionObserverLifeStatus
@add_metaclass(ABCMeta)
class ConnectionObserver(object):
_not_raised_exceptions = list() # list of dict: "exception" and "time"
_exceptions_lock = threading.Lock()
def __init__(self, connection=None, runner=None):
"""
Create instance of ConnectionObserver class
:param connection: connection used to receive data awaited for
"""
super(ConnectionObserver, self).__init__()
self.life_status = ConnectionObserverLifeStatus()
self.connection = connection
self.runner = self._get_runner(runner=runner)
self._result = None
self._exception = None
self._future = None
self.device_logger = logging.getLogger('moler.{}'.format(self.get_logger_name()))
self.logger = logging.getLogger('moler.connection.{}'.format(self.get_logger_name()))
def _get_runner(self, runner):
"""
:param runner: Runner
:return: Runner instance
"""
return_runner = runner
if return_runner is None and self.connection is not None:
return_runner = self.connection.get_runner()
if return_runner is None:
return_runner = get_runner()
return return_runner
def __str__(self):
return '{}(id:{})'.format(self.__class__.__name__, instance_id(self))
__base_str = __str__
def __repr__(self):
cmd_str = self.__str__()
connection_str = '<NO CONNECTION>'
if self.connection:
connection_str = repr(self.connection)
return '{}, using {})'.format(cmd_str[:-1], connection_str)
def __call__(self, timeout=None, *args, **kwargs):
"""
Run connection-observer in foreground
till it is done or timeouted
CAUTION: if you call it from asynchronous code (async def) you may block events loop for long time.
You should rather await it via:
result = await connection_observer
or (to have timeout)
result = await asyncio.wait_for(connection_observer, timeout=10)
or you may delegate blocking call execution to separate thread,
see: https://pymotw.com/3/asyncio/executors.html
"""
self.start(timeout, *args, **kwargs)
# started_observer = self.start(timeout, *args, **kwargs)
# if started_observer:
# return started_observer.await_done(*args, **kwargs)
return self.await_done()
# TODO: raise ConnectionObserverFailedToStart
@property
def _is_done(self):
return self.life_status.is_done
@_is_done.setter
def _is_done(self, value):
self.life_status.is_done = value
if value:
CommandScheduler.dequeue_running_on_connection(connection_observer=self)
@property
def _is_cancelled(self):
return self.life_status.is_cancelled
@_is_cancelled.setter
def _is_cancelled(self, value):
self.life_status.is_cancelled = value
    @property
    def terminating_timeout(self):
        # Value is stored on the life-status object (presumably the grace
        # period [sec] for shutting the observer down - confirm in runner docs).
        return self.life_status.terminating_timeout
    @terminating_timeout.setter
    def terminating_timeout(self, value):
        self.life_status.terminating_timeout = value
    @property
    def timeout(self):
        # Observer timeout [sec]; stored on the life-status object.
        return self.life_status.timeout
    @timeout.setter
    def timeout(self, value):
        # levels_to_go_up=2 : extract caller info to log where .timeout=XXX has been called from
        # ConnectionObserver.__base_str is used so subclass __str__ overrides
        # cannot distort this log line.
        self._log(logging.DEBUG, "Setting {} timeout to {} [sec]".format(ConnectionObserver.__base_str(self), value),
                  levels_to_go_up=2)
        self.life_status.timeout = value
    @property
    def start_time(self):
        # Timestamp (time.time()) recorded inside start().
        return self.life_status.start_time
    def get_logger_name(self):
        """Return the name used to build moler loggers: the connection's name when available, else the class name."""
        if self.connection and hasattr(self.connection, "name"):
            return self.connection.name
        else:
            return self.__class__.__name__
    def start(self, timeout=None, *args, **kwargs):
        """Start background execution of connection-observer.

        :param timeout: optional new timeout [sec]; replaces the current one when truthy.
        :return: self, so calls can be chained (start(...).await_done()).
        """
        with exception_stored_if_not_main_thread(self):
            if timeout:
                self.timeout = timeout
            self._validate_start(*args, **kwargs)
            # After start we treat it as started even if it's underlying
            # parallelism machinery (threads, coroutines, ...) has not started yet
            # (thread didn't get control, coro didn't start in async-loop)
            # That is so, since observer lifetime starts with it's timeout-clock
            # and timeout is counted from calling observer.start()
            self.life_status._is_running = True
            self.life_status.start_time = time.time()
            # Besides not started parallelism machinery causing start-delay
            # we can have start-delay caused by commands queue on connection
            # (can't submit command to background-run till previous stops running)
            CommandScheduler.enqueue_starting_on_connection(connection_observer=self)
            # Above line will set self._future when it is possible to submit
            # observer to background-run (observer not command, or empty commands queue)
            # or setting self._future will be delayed by nonempty commands queue.
        return self
    def _validate_start(self, *args, **kwargs):
        """Raise when the observer must not be started: already done, no connection, or non-positive timeout."""
        # check base class invariants first
        if self.done():
            raise WrongUsage("You can't run same {} multiple times. It is already done.".format(self))
        if not self.connection:
            # only if we have connection we can expect some data on it
            # at the latest "just before start" we need connection
            raise NoConnectionProvided(self)
        # ----------------------------------------------------------------------
        # We intentionally do not check if connection is open here.
        # In such case net result anyway will be failed/timeouted observer -
        # - so, user will need to investigate "why".
        # Checking connection state would benefit in early detection of:
        # "error condition - no chance to succeed since connection is closed".
        # However, drawback is a requirement on connection to have is_open() API
        # We choose minimalistic dependency over better troubleshooting support.
        # ----------------------------------------------------------------------
        if self.timeout <= 0.0:
            raise ConnectionObserverTimeout(self, self.timeout, "before run", "timeout is not positive value")
    def __iter__(self):  # Python 3.4 support - do we need it?
        """
        Implement iterator protocol to support 'yield from' in @asyncio.coroutine
        :return: iterator provided by the runner, tied to this observer's future
        """
        if self._future is None:
            # Lazy-start so that plain 'yield from observer' is enough.
            self.start()
        assert self._future is not None
        return self.runner.wait_for_iterator(self, self._future)
    def __await__(self):
        """
        Await completion of connection-observer.
        Allows to use Python3 'await' syntax
        According to https://www.python.org/dev/peps/pep-0492/#await-expression
        it is a SyntaxError to use await outside of an async def function.
        :return: iterator driving the await (delegates to __iter__)
        """
        # We may have already started connection_observer:
        # connection_observer = SomeObserver()
        # connection_observer.start()
        # then we await it via:
        # result = await connection_observer
        # but above notation in terms of Python3 async code may also mean "start it and await completion", so it may
        # look like:
        # connection_observer = SomeObserver()
        # result = await connection_observer
        # __iter__ covers both cases since it lazy-starts when needed.
        return self.__iter__()
    def await_done(self, timeout=None):
        """
        Await completion of connection-observer.
        CAUTION: if you call it from asynchronous code (async def) you may block events loop for long time.
        You should rather await it via:
        result = await connection_observer
        or (to have timeout)
        result = await asyncio.wait_for(connection_observer, timeout=10)
        or you may delegate blocking call execution to separate thread,
        see: https://pymotw.com/3/asyncio/executors.html
        :param timeout: max wait time [sec] passed to the runner; None presumably means "use observer's own timeout"
        :return: observer result
        """
        if self.done():
            # Fast path: already finished - no need to involve the runner.
            return self.result()
        with exception_stored_if_not_main_thread(self):
            if not self.life_status._is_running:
                raise ConnectionObserverNotStarted(self)
            # check if already is running
            self.runner.wait_for(connection_observer=self, connection_observer_future=self._future, timeout=timeout)
        return self.result()
    def cancel(self):
        """Cancel execution of connection-observer.

        :return: False when already cancelled/done, True when the cancel took effect.
        """
        # TODO: call cancel on runner to stop background run of connection-observer
        if self.cancelled() or self.done():
            return False
        self._is_cancelled = True
        # Setting _is_done also dequeues the observer from the connection queue.
        self._is_done = True
        return True
    def set_end_of_life(self):
        """
        Set end of life of object. Dedicated for runners only!
        :return: None
        """
        # Marks done without storing a result or exception.
        self._is_done = True
    def cancelled(self):
        """Return True if the connection-observer has been cancelled."""
        # Delegates to life-status via the _is_cancelled property.
        return self._is_cancelled
    def running(self):
        """Return True if the connection-observer is currently executing."""
        # Lazily clears the running flag on first query after completion.
        if self.done() and self.life_status._is_running:
            self.life_status._is_running = False
        return self.life_status._is_running
    def done(self):
        """Return True if the connection-observer is already done."""
        # Delegates to life-status via the _is_done property.
        return self._is_done
    def set_result(self, result):
        """Should be used to set final result"""
        if self.done():
            # Result (or exception/cancel) may only be set once.
            raise ResultAlreadySet(self)
        self._result = result
        # Mark done only after the result is stored.
        self._is_done = True
    def connection_closed_handler(self):
        """
        Called by Moler (ThreadedMolerConnection) when connection is closed.
        :return: None
        """
        if not self.done():
            # Closing the connection while the observer still runs is a usage
            # error: record it as the observer's exception and cancel.
            connection_name = self.get_logger_name()
            msg = "'{}' is not done but connection '{}' is about to be closed.".format(self, connection_name)
            ex = WrongUsage(msg)
            self.set_exception(ex)
        self.cancel()
    @abstractmethod
    def data_received(self, data, recv_time):
        """
        Entry point where feeders pass data read from connection
        Here we perform data parsing to conclude in result setting.
        Implementations typically finish by calling set_result()/set_exception().
        :param data: List of strings sent by device.
        :param recv_time: time stamp with the moment when the data was read from connection. Time is given as
        datetime.datetime instance.
        :return: None.
        """
    def set_exception(self, exception):
        """
        Should be used to indicate some failure during observation.
        :param exception: Exception to set
        :return: None
        """
        # Store the exception first, then mark done (done-setter dequeues
        # the observer from the connection queue).
        self._set_exception_without_done(exception)
        self._is_done = True
    def _set_exception_without_done(self, exception):
        """
        Should be used to indicate some failure during observation. This method does not finish connection observer
        object!
        :param exception: exception to set
        :return: None
        """
        if self._is_done:
            # A done observer keeps its outcome; only warn about the attempt.
            self._log(logging.WARNING,
                      "Attempt to set exception {!r} on already done {}".format(exception, self),
                      levels_to_go_up=2)
            return
        ConnectionObserver._change_unraised_exception(new_exception=exception, observer=self)
        self._log(logging.INFO,
                  "{}.{} has set exception {!r}".format(self.__class__.__module__, self, exception),
                  levels_to_go_up=2)
    def result(self):
        """Retrieve final result of connection-observer"""
        with ConnectionObserver._exceptions_lock:
            ConnectionObserver._log_unraised_exceptions(self)
            if self._exception:
                exception = self._exception
                # Raising here counts as "handled": drop it from the class-wide
                # registry of not-yet-raised exceptions.
                if exception in ConnectionObserver._not_raised_exceptions:
                    ConnectionObserver._not_raised_exceptions.remove(exception)
                raise exception
        if self.cancelled():
            raise NoResultSinceCancelCalled(self)
        if not self.done():
            raise ResultNotAvailableYet(self)
        return self._result
    def on_timeout(self):
        """Callback called when observer times out"""
        # Default: no action; subclasses may override.
        pass
    def is_command(self):
        """
        :return: True if instance of ConnectionObserver is a command. False if not a command.
        """
        # Base observers are not commands; command subclasses override this.
        return False
    def extend_timeout(self, timedelta):  # TODO: probably API to remove since we have runner tracking .timeout=XXX
        """Extend the observer timeout by given seconds and notify the runner about the change."""
        prev_timeout = self.timeout
        self.timeout = self.timeout + timedelta
        msg = "Extended timeout from %.2f with delta %.2f to %.2f" % (prev_timeout, timedelta, self.timeout)
        self.runner.timeout_change(timedelta)
        self._log(logging.INFO, msg)
    def on_inactivity(self):
        """
        Callback called when no data is received on connection within self.life_status.inactivity_timeout seconds
        :return: None
        """
        # Default: no action; subclasses may override.
        pass
    @ClassProperty
    def observer_name(cls):
        """Observer name derived from the class name, e.g. MyObserver -> my_observer."""
        name = camel_case_to_lower_case_underscore(cls.__name__)
        return name
    @staticmethod
    def get_unraised_exceptions(remove=True):
        """Return the class-wide list of exceptions that were set but never raised to any caller.

        :param remove: when True the internal registry is emptied; when False a copy is returned.
        """
        with ConnectionObserver._exceptions_lock:
            if remove:
                list_of_exceptions = ConnectionObserver._not_raised_exceptions
                ConnectionObserver._not_raised_exceptions = list()
                return list_of_exceptions
            else:
                # Hand out a copy so callers cannot mutate the registry.
                list_of_exceptions = copy_list(ConnectionObserver._not_raised_exceptions)
                return list_of_exceptions
    @staticmethod
    def _change_unraised_exception(new_exception, observer):
        """Replace observer's stored exception while keeping the class-wide unraised-exceptions registry in sync."""
        with ConnectionObserver._exceptions_lock:
            old_exception = observer._exception
            ConnectionObserver._log_unraised_exceptions(observer)
            if old_exception:
                observer._log(logging.DEBUG,
                              "{} has overwritten exception. From {!r} to {!r}".format(
                                  observer,
                                  old_exception,
                                  new_exception,
                              ))
                # The overwritten exception will never be raised - drop it.
                if old_exception in ConnectionObserver._not_raised_exceptions:
                    ConnectionObserver._not_raised_exceptions.remove(old_exception)
                else:
                    observer._log(logging.DEBUG,
                                  "{}: cannot find exception {!r} in _not_raised_exceptions.".format(
                                      observer,
                                      old_exception,
                                  ))
            ConnectionObserver._log_unraised_exceptions(observer)
            ConnectionObserver._not_raised_exceptions.append(new_exception)
            observer._exception = new_exception
    @staticmethod
    def _log_unraised_exceptions(observer):
        """Debug-log every exception still waiting in the class-wide registry."""
        for i, item in enumerate(ConnectionObserver._not_raised_exceptions):
            observer._log(logging.DEBUG, "{:4d} NOT RAISED: {!r}".format(i + 1, item), levels_to_go_up=2)
    def get_long_desc(self):
        """Return long human-readable description used in logs."""
        return "Observer '{}.{}'".format(self.__class__.__module__, self)
    def get_short_desc(self):
        """Return short human-readable description used in logs (same as long one for base observers)."""
        return "Observer '{}.{}'".format(self.__class__.__module__, self)
    def _log(self, lvl, msg, extra=None, levels_to_go_up=1):
        """Log msg to both the observer's logger and the device logger, injecting moler's 'log_name' extra field."""
        extra_params = {
            'log_name': self.get_logger_name()
        }
        if extra:
            extra_params.update(extra)
        # levels_to_go_up=1 : extract caller info to log where _log() has been called from
        log_into_logger(self.logger, lvl, msg, extra=extra_params, levels_to_go_up=levels_to_go_up)
        log_into_logger(self.device_logger, lvl, msg, extra=extra_params, levels_to_go_up=levels_to_go_up)
| 39.610984 | 117 | 0.643096 |
16a705f417863942716b0e96c261e5736fd3281e | 5,522 | py | Python | h8mail/utils/localsearch.py | denmilu/h8mail_email_OSINT | 5df57799709fbe6ee724f941483a00af360398ac | [
"BSD-3-Clause"
] | 2 | 2019-12-20T04:07:48.000Z | 2020-04-05T14:44:25.000Z | h8mail/utils/localsearch.py | likescam/h8mail_email_OSINT | 5df57799709fbe6ee724f941483a00af360398ac | [
"BSD-3-Clause"
] | null | null | null | h8mail/utils/localsearch.py | likescam/h8mail_email_OSINT | 5df57799709fbe6ee724f941483a00af360398ac | [
"BSD-3-Clause"
] | 2 | 2020-02-15T18:08:58.000Z | 2020-04-05T14:44:26.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from multiprocessing import Pool
from itertools import takewhile, repeat
from .classes import local_breach_target
from .colors import colors as c
def local_to_targets(targets, local_results):
    """
    Appends data from local_breach_target objects using existing list of targets.

    For every target whose email matches a local result, a tuple
    ("LOCALSEARCH", full_output_line, original_content) is appended to the
    target's data list and the target is flagged as pwned.
    Full output line is stored in t.data[1] and original found data in t.data[2]
    """
    for target in targets:
        hits = (res for res in local_results if res.email == target.email)
        for res in hits:
            summary = "[{}] Line {}: {}".format(
                os.path.basename(res.filepath), res.line, res.content
            ).strip()
            target.data.append(("LOCALSEARCH", summary, res.content.strip()))
            target.pwned = True
    return targets
def raw_in_count(filename):
    """
    StackOverflow trick to rapidly count lines in big files.

    Reads the raw (unbuffered) stream in 1 MiB chunks and counts newline
    bytes, which is much faster than iterating line by line.

    :param filename: path of the file to scan
    :return: total number of newline-terminated lines
    """
    c.info_news("Identifying total line number...")
    # 'with' guarantees the handle is closed; the original leaked it.
    with open(filename, "rb") as f:
        bufgen = takewhile(lambda x: x, (f.raw.read(1024 * 1024) for _ in repeat(None)))
        return sum(buf.count(b"\n") for buf in bufgen)
def worker(filepath, target_list):
    """
    Searches for every email from target_list in every line of filepath.
    Attempts to decode line using utf-8. If it fails, catch and use raw data

    :param filepath: path of the breach-dump file to scan
    :param target_list: iterable of target strings (e.g. email addresses)
    :return: list of local_breach_target hits, or None when the outer read failed
    """
    try:
        with open(filepath, "rb") as fp:
            found_list = []
            size = os.stat(filepath).st_size
            c.info_news(
                "Worker [{PID}] is searching for targets in {filepath} ({size} bytes)".format(
                    PID=os.getpid(), filepath=filepath, size=size
                )
            )
            for cnt, line in enumerate(fp):
                # NOTE: 'str(line)' stringifies the bytes repr (b'...'), so the
                # substring match works on the repr, not the decoded text.
                for t in target_list:
                    if t in str(line):
                        try:
                            decoded = str(line, "utf-8")
                            found_list.append(
                                local_breach_target(t, filepath, cnt, decoded)
                            )
                            # [:-4]+"****" masks the last characters of the
                            # printed line in console output.
                            c.good_news(
                                f"Found occurrence [{filepath}] Line {cnt}: {decoded}"[:-4]+"****"
                            )
                        except Exception as e:
                            # Decoding failed: fall back to the raw bytes repr.
                            c.bad_news(
                                f"Got a decoding error line {cnt} - file: {filepath}"
                            )
                            c.good_news(
                                f"Found occurrence [{filepath}] Line {cnt}: {line}"[:-4]+"****"
                            )
                            found_list.append(
                                local_breach_target(t, filepath, cnt, str(line))
                            )
            return found_list
    except Exception as e:
        # Broad catch keeps one bad file from killing the whole pool;
        # implicitly returns None in that case.
        c.bad_news("Something went wrong with worker")
        print(e)
def local_search(files_to_parse, target_list):
    """Scan all files in parallel (one worker task per file) and merge the hits.

    :param files_to_parse: iterable of file paths
    :param target_list: iterable of target strings
    :return: flat list of local_breach_target objects
    """
    pool = Pool()
    found_list = []
    async_results = [
        pool.apply_async(worker, args=(f, target_list))
        for i, f in enumerate(files_to_parse)
    ]
    for r in async_results:
        # Workers return None on failure; skip those.
        # r.get() is called twice; multiprocessing caches the result so the
        # second call is cheap.
        if r.get() is not None:
            found_list.extend(r.get())
    pool.close()
    pool.join()
    return found_list
import sys
def progress(count, total, status=""):
    """Draw an in-place (carriage-return based) 60-character terminal progress bar.

    :param count: amount of work completed so far
    :param total: total amount of work
    :param status: optional trailing status text
    """
    width = 60
    filled = int(round(width * count / float(total)))
    percent = round(100.0 * count / float(total), 1)
    bar = "=" * filled + "-" * (width - filled)
    sys.stdout.write("[{}] {}% ...{}\r".format(bar, percent, status))
    # Erase to end of line so shorter redraws leave no residue.
    sys.stdout.write("\033[K")
    sys.stdout.flush()
def local_search_single(files_to_parse, target_list):
    """Sequential (single-process) variant of local_search with a progress bar.

    Unlike worker(), matched lines are printed in full (no ****-masking).

    :param files_to_parse: iterable of file paths
    :param target_list: iterable of target strings
    :return: list of local_breach_target objects
    """
    found_list = []
    for file_to_parse in files_to_parse:
        with open(file_to_parse, "rb") as fp:
            size = os.stat(file_to_parse).st_size
            # Pre-count lines so the progress bar can show work remaining.
            lines_no = raw_in_count(file_to_parse)
            c.info_news(
                "Searching for targets in {file_to_parse} ({size} bytes, {lines_no} lines)".format(
                    file_to_parse=file_to_parse, size=size, lines_no=lines_no
                )
            )
            for cnt, line in enumerate(fp):
                lines_left = lines_no - cnt
                progress(
                    cnt, lines_no, f"{cnt} lines checked - {lines_left} lines left"
                )
                for t in target_list:
                    if t in str(line):
                        try:
                            decoded = str(line, "utf-8")
                            found_list.append(
                                local_breach_target(t, file_to_parse, cnt, decoded)
                            )
                            c.good_news(
                                f"Found occurrence [{file_to_parse}] Line {cnt}: {decoded}"
                            )
                        except Exception as e:
                            # Decoding failed: keep the raw bytes repr instead.
                            c.bad_news(
                                f"Got a decoding error line {cnt} - file: {file_to_parse}"
                            )
                            c.good_news(
                                f"Found occurrence [{file_to_parse}] Line {cnt}: {line}"
                            )
                            found_list.append(
                                local_breach_target(t, file_to_parse, cnt, str(line))
                            )
    return found_list
| 36.091503 | 99 | 0.487142 |
96b2270a408424a54e6c0b235e22eb2f896d3c85 | 2,836 | py | Python | PythonFileTransfer/src/main/Utils/NetUtils.py | mgsky1/Mercury | 94bff94443f8a0f4054ff71189804d5e942fb19e | [
"Apache-2.0"
] | 12 | 2019-10-28T15:35:06.000Z | 2021-11-17T06:50:18.000Z | PythonFileTransfer/src/main/Utils/NetUtils.py | mgsky1/Mercury | 94bff94443f8a0f4054ff71189804d5e942fb19e | [
"Apache-2.0"
] | null | null | null | PythonFileTransfer/src/main/Utils/NetUtils.py | mgsky1/Mercury | 94bff94443f8a0f4054ff71189804d5e942fb19e | [
"Apache-2.0"
] | 6 | 2019-12-06T13:36:42.000Z | 2021-11-15T13:13:50.000Z | '''
@desc:网络工具类
@author: Martin Huang
@time: created on 2019/5/29 18:55
@修改记录:2019/6/3 => 完成基础骨架
2019/6/6 => 增加异常处理
2019/6/7 => 增加端口连通性检测
2019/6/8 => BUG修复
'''
import socket
from threading import Lock
from Utils.ConversionUtils import ConversionUtils
from Utils.IOUtils import IOUtils
# For use when running under PyCharm (absolute 'src.main' package layout):
#from src.main.Utils.ConversionUtils import *
#from src.main.Utils.IOUtils import *
class NetUtils:
    # NOTE(review): most methods are written without 'self' and without
    # @staticmethod, so they only work when called on the class itself
    # (e.g. NetUtils.transferSigFile(...)), not on instances.

    # Transfer a single file: listen on `port`, accept one client, stream the file.
    def transferSigFile(path,port=9000,bufferSize=1,verbose=True):
        server = socket.socket()
        # Set SO_REUSEADDR so the port is released as soon as the server program
        # ends; otherwise the OS holds it for several minutes, which causes
        # errors on Linux.
        server.setsockopt(socket.SOL_SOCKET,socket.SO_REUSEADDR,1)
        server.bind(('', port))
        server.listen(5)
        conn, add = server.accept()
        if verbose:
            print('Client IP : %s:%d' % add)
        # NOTE(review): this lock is local to the call, and 'if not file_lock'
        # in the finally-clause is always False (a Lock object is truthy),
        # so release() never runs - the lock has no real effect.
        file_lock = Lock()
        try:
            # bufferSize argument is in megabytes; convert to bytes.
            bufferSize =ConversionUtils.megabytes2Bytes(bufferSize)
            with open(path,'rb') as file:
                file_lock.acquire()
                if verbose:
                    print('连接成功,开始传送文件')
                while True:
                    tdata = file.read(bufferSize)
                    if not tdata:
                        break
                    conn.send(tdata)
                if verbose:
                    print('传输了1个文件')
                return 1
        except(FileNotFoundError):
            print("文件不存在!")
            return 0
        finally:
            server.close()
            conn.close()
            if not file_lock:
                file_lock.release()
    # Receive a single file: connect to (ip, port) and write the stream to `path`.
    def receiveSigFile(path,ip,port=9000,bufferSize=1,verbose=True):
        client = socket.socket()
        # Busy-retry until the sender's socket accepts the connection.
        while True:
            try:
                client.connect((ip, port))
                break
            except:
                continue
        try:
            bufferSize = ConversionUtils.megabytes2Bytes(bufferSize)
            with open(path,'wb') as file:
                if verbose:
                    print("连接成功,开始接收文件")
                while True:
                    tdata = client.recv(bufferSize)
                    if not tdata:
                        break
                    file.write(tdata)
                if verbose:
                    print('成功接收了1个文件')
        finally:
            client.close()
    # Get this machine's outward-facing IP address (via a UDP socket to 8.8.8.8).
    # NOTE(review): declared as @classmethod but the first parameter is named
    # 'self'; it actually receives the class object.
    @classmethod
    def getLocalIPAddr(self):
        s = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
        s.connect(('8.8.8.8', 80))
        ip=s.getsockname()[0]
        s.shutdown(socket.SHUT_RDWR)
        s.close()
        return ip
    # Check whether a given local port is occupied
    # (True means something accepted our TCP connect on 127.0.0.1).
    def isPortOccupied(port):
        ip = '127.0.0.1'
        s = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
        try:
            s.connect((ip,port))
            s.close()
            return True
        except:
            return False
86dd311e31406b1f9dd803db14b646d98e5b48ee | 7,228 | py | Python | train.py | patrickctrf/6-DOF-Inertial-Odometry | 4e7a96408db69d609f0250fd6629c39173fc3863 | [
"BSD-3-Clause"
] | null | null | null | train.py | patrickctrf/6-DOF-Inertial-Odometry | 4e7a96408db69d609f0250fd6629c39173fc3863 | [
"BSD-3-Clause"
] | null | null | null | train.py | patrickctrf/6-DOF-Inertial-Odometry | 4e7a96408db69d609f0250fd6629c39173fc3863 | [
"BSD-3-Clause"
] | null | null | null | import numpy as np
import matplotlib.pyplot as plt
import argparse
from keras.callbacks import ModelCheckpoint, TensorBoard
from keras.models import load_model
from tensorflow.keras.optimizers import Adam
from sklearn.utils import shuffle
from time import time
from dataset import *
from model import *
from util import *
def main():
    """Train the 6-DOF inertial odometry model on OxIOD or EuRoC MAV data.

    Parses CLI args (dataset name + output model name), loads every IMU /
    ground-truth file pair, cuts the recordings into sliding windows, trains
    the quaternion-loss model with checkpointing/TensorBoard, saves the best
    prediction model as '<output>.hdf5' and plots the loss curves.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('dataset', choices=['oxiod', 'euroc'], help='Training dataset name (\'oxiod\' or \'euroc\')')
    parser.add_argument('output', help='Model output name')
    args = parser.parse_args()
    np.random.seed(0)  # reproducible runs
    # Sliding-window parameters: samples per window, step between windows.
    window_size = 200
    stride = 10
    x_gyro = []
    x_acc = []
    y_delta_p = []
    y_delta_q = []
    imu_data_filenames = []
    gt_data_filenames = []
    # Hard-coded training file lists for each supported dataset
    # (imu and gt lists are index-aligned pairs).
    if args.dataset == 'oxiod':
        imu_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data5/syn/imu3.csv')
        imu_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data2/syn/imu1.csv')
        imu_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data2/syn/imu2.csv')
        imu_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data5/syn/imu2.csv')
        imu_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data3/syn/imu4.csv')
        imu_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data4/syn/imu4.csv')
        imu_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data4/syn/imu2.csv')
        imu_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data1/syn/imu7.csv')
        imu_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data5/syn/imu4.csv')
        imu_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data4/syn/imu5.csv')
        imu_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data1/syn/imu3.csv')
        imu_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data3/syn/imu2.csv')
        imu_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data2/syn/imu3.csv')
        imu_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data1/syn/imu1.csv')
        imu_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data3/syn/imu3.csv')
        imu_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data3/syn/imu5.csv')
        imu_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data1/syn/imu4.csv')
        gt_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data5/syn/vi3.csv')
        gt_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data2/syn/vi1.csv')
        gt_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data2/syn/vi2.csv')
        gt_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data5/syn/vi2.csv')
        gt_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data3/syn/vi4.csv')
        gt_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data4/syn/vi4.csv')
        gt_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data4/syn/vi2.csv')
        gt_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data1/syn/vi7.csv')
        gt_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data5/syn/vi4.csv')
        gt_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data4/syn/vi5.csv')
        gt_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data1/syn/vi3.csv')
        gt_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data3/syn/vi2.csv')
        gt_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data2/syn/vi3.csv')
        gt_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data1/syn/vi1.csv')
        gt_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data3/syn/vi3.csv')
        gt_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data3/syn/vi5.csv')
        gt_data_filenames.append('Oxford Inertial Odometry Dataset/handheld/data1/syn/vi4.csv')
    elif args.dataset == 'euroc':
        imu_data_filenames.append('MH_01_easy/mav0/imu0/data.csv')
        imu_data_filenames.append('MH_03_medium/mav0/imu0/data.csv')
        imu_data_filenames.append('MH_05_difficult/mav0/imu0/data.csv')
        imu_data_filenames.append('V1_02_medium/mav0/imu0/data.csv')
        imu_data_filenames.append('V2_01_easy/mav0/imu0/data.csv')
        imu_data_filenames.append('V2_03_difficult/mav0/imu0/data.csv')
        gt_data_filenames.append('MH_01_easy/mav0/state_groundtruth_estimate0/data.csv')
        gt_data_filenames.append('MH_03_medium/mav0/state_groundtruth_estimate0/data.csv')
        gt_data_filenames.append('MH_05_difficult/mav0/state_groundtruth_estimate0/data.csv')
        gt_data_filenames.append('V1_02_medium/mav0/state_groundtruth_estimate0/data.csv')
        gt_data_filenames.append('V2_01_easy/mav0/state_groundtruth_estimate0/data.csv')
        gt_data_filenames.append('V2_03_difficult/mav0/state_groundtruth_estimate0/data.csv')
    # Load every recording and cut it into windowed samples
    # (gyro/acc inputs; position/quaternion deltas as targets).
    for i, (cur_imu_data_filename, cur_gt_data_filename) in enumerate(zip(imu_data_filenames, gt_data_filenames)):
        if args.dataset == 'oxiod':
            cur_gyro_data, cur_acc_data, cur_pos_data, cur_ori_data = load_oxiod_dataset(cur_imu_data_filename, cur_gt_data_filename)
        elif args.dataset == 'euroc':
            cur_gyro_data, cur_acc_data, cur_pos_data, cur_ori_data = load_euroc_mav_dataset(cur_imu_data_filename, cur_gt_data_filename)
        [cur_x_gyro, cur_x_acc], [cur_y_delta_p, cur_y_delta_q], init_p, init_q = load_dataset_6d_quat(cur_gyro_data, cur_acc_data, cur_pos_data, cur_ori_data, window_size, stride)
        x_gyro.append(cur_x_gyro)
        x_acc.append(cur_x_acc)
        y_delta_p.append(cur_y_delta_p)
        y_delta_q.append(cur_y_delta_q)
    # Stack per-recording arrays into one training set and shuffle it.
    x_gyro = np.vstack(x_gyro)
    x_acc = np.vstack(x_acc)
    y_delta_p = np.vstack(y_delta_p)
    y_delta_q = np.vstack(y_delta_q)
    x_gyro, x_acc, y_delta_p, y_delta_q = shuffle(x_gyro, x_acc, y_delta_p, y_delta_q)
    # The train model wraps the prediction model; loss=None because the loss
    # is produced internally (see CustomMultiLossLayer in load_model below).
    pred_model = create_pred_model_6d_quat(window_size)
    train_model = create_train_model_6d_quat(pred_model, window_size)
    train_model.compile(optimizer=Adam(0.0001), loss=None)
    model_checkpoint = ModelCheckpoint('model_checkpoint.hdf5', monitor='val_loss', save_best_only=True, verbose=1)
    tensorboard = TensorBoard(log_dir="logs/{}".format(time()))
    history = train_model.fit([x_gyro, x_acc, y_delta_p, y_delta_q], epochs=500, batch_size=32, verbose=1, callbacks=[model_checkpoint, tensorboard], validation_split=0.1)
    # Reload the best checkpoint and copy its weights minus the last two
    # (presumably the loss layer's own weights - confirm against model.py)
    # into a fresh standalone prediction model.
    train_model = load_model('model_checkpoint.hdf5', custom_objects={'CustomMultiLossLayer': CustomMultiLossLayer}, compile=False)
    pred_model = create_pred_model_6d_quat(window_size)
    pred_model.set_weights(train_model.get_weights()[:-2])
    pred_model.save('%s.hdf5' % args.output)
    # Plot training/validation loss curves.
    plt.plot(history.history['loss'])
    plt.plot(history.history['val_loss'])
    plt.title('Model loss')
    plt.ylabel('Loss')
    plt.xlabel('Epoch')
    plt.legend(['Train', 'Validation'], loc='upper left')
    plt.show()
if __name__ == '__main__':
    main()
| 52.376812 | 180 | 0.751522 |
e299c639923e0fe166acdb968c0fd2c4e7ff8264 | 2,802 | py | Python | setup.py | ggraham/certbot-haproxy | 80a62713160a11601de59e28913a819e179ece84 | [
"Apache-2.0"
] | null | null | null | setup.py | ggraham/certbot-haproxy | 80a62713160a11601de59e28913a819e179ece84 | [
"Apache-2.0"
] | null | null | null | setup.py | ggraham/certbot-haproxy | 80a62713160a11601de59e28913a819e179ece84 | [
"Apache-2.0"
] | null | null | null | import sys
from setuptools import setup
from setuptools import find_packages
own_version = '0.1.1'
certbot_version = '0.8.1'
# Please update tox.ini when modifying dependency version requirements
install_requires = [
'acme>={0}'.format(certbot_version),
'certbot>={0}'.format(certbot_version),
# For pkg_resources. >=1.0 so pip resolves it to a version cryptography
# will tolerate; see #2599:
'setuptools>=1.0',
'zope.component',
'zope.interface',
'future',
]
if sys.version_info < (2, 7):
install_requires.append('mock<1.1.0')
else:
install_requires.append('mock')
docs_extras = [
'Sphinx>=1.0', # autodoc_member_order = 'bysource', autodoc_default_flags
'sphinx_rtd_theme',
]
long_description = (
"This is a plugin for Certbot, it enables automatically authenticating "
"domains ans retrieving certificates. It can also restart HAProxy after "
"new certificates are installed. However, it will not configure HAProxy "
"because. HAProxy is unlikely to be used for small/simple setups like what"
" Apache or NGiNX are more likely to be used for. HAProxy configurations "
"vary greatly, any configuration this plugin could define is most likely "
"not applicable in your environment."
)
haproxy_authenticator = 'certbot_haproxy.authenticator:HAProxyAuthenticator'
haproxy_installer = 'certbot_haproxy.installer:HAProxyInstaller'
setup(
name='certbot-haproxy',
version=own_version,
description="HAProxy plugin for Certbot",
long_description=long_description,
url='https://code.greenhost.net/open/certbot-haproxy',
author="Greenhost BV",
author_email='lehaproxy@greenhost.net',
license='Apache License 2.0',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Plugins',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
'Topic :: System :: Installation/Setup',
'Topic :: System :: Networking',
'Topic :: System :: Systems Administration',
'Topic :: Utilities',
],
packages=find_packages(),
include_package_data=True,
install_requires=install_requires,
extras_require={
'docs': docs_extras,
},
entry_points={
'certbot.plugins': [
'haproxy-authenticator = %s' % haproxy_authenticator,
'haproxy-installer = %s' % haproxy_installer
],
},
# test_suite='certbot_haproxy',
)
| 32.964706 | 79 | 0.669879 |
2f993cfac43ecaf5c5f78a705a56cd95e9ff0e4c | 409 | py | Python | tests/stacks/test_stack_sort.py | davjohnst/fundamentals | f8aff4621432c3187305dd04563425f54ea08495 | [
"Apache-2.0"
] | null | null | null | tests/stacks/test_stack_sort.py | davjohnst/fundamentals | f8aff4621432c3187305dd04563425f54ea08495 | [
"Apache-2.0"
] | null | null | null | tests/stacks/test_stack_sort.py | davjohnst/fundamentals | f8aff4621432c3187305dd04563425f54ea08495 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
from unittest import TestCase
from fundamentals.stacks.sort_using_stack import StackSort
class TestStackSort(TestCase):
    """Unit tests for StackSort.sortStack."""
    def test_stack_sort(self):
        """sortStack must return the elements in ascending order."""
        data = [3, 6, 8, 2, 78, 1, 23, 45, 9]
        expected = sorted(data)
        # Pass a copy because sortStack may consume/mutate its input list.
        result = StackSort.sortStack(data[:])
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(expected, result)
def main():
    """Placeholder entry point; the tests are meant to be run via a test runner."""
    return None
if __name__ == "__main__":
    main()
2d99c6c0793fde0547ef142cf6ef6088c330f3e1 | 274 | py | Python | transport/transport/doctype/trip_indent_detail/trip_indent_detail.py | finbyz/transport | 7e12b900e79f18f745e20f09433242354e75cdab | [
"MIT"
] | 2 | 2022-02-17T16:03:04.000Z | 2022-03-29T09:27:32.000Z | transport/transport/doctype/trip_indent_detail/trip_indent_detail.py | finbyz/transport | 7e12b900e79f18f745e20f09433242354e75cdab | [
"MIT"
] | null | null | null | transport/transport/doctype/trip_indent_detail/trip_indent_detail.py | finbyz/transport | 7e12b900e79f18f745e20f09433242354e75cdab | [
"MIT"
] | 5 | 2017-09-19T13:21:25.000Z | 2022-02-17T16:03:09.000Z | # -*- coding: utf-8 -*-
# Copyright (c) 2018, FinByz Tech Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class TripIndentDetail(Document):
	"""Server-side controller for the Trip Indent Detail DocType.

	No custom behaviour beyond what frappe's Document base class provides.
	"""
	pass
| 24.909091 | 60 | 0.777372 |
ac926c14a03007d8cf87e6d080d868263c2d8e42 | 847 | py | Python | atom/nucleus/python/test/test_page_score.py | AbhiGupta03/SDK | f3a61aae7a847f07f0c22a154ca88dc378e9d25e | [
"Apache-2.0"
] | null | null | null | atom/nucleus/python/test/test_page_score.py | AbhiGupta03/SDK | f3a61aae7a847f07f0c22a154ca88dc378e9d25e | [
"Apache-2.0"
] | null | null | null | atom/nucleus/python/test/test_page_score.py | AbhiGupta03/SDK | f3a61aae7a847f07f0c22a154ca88dc378e9d25e | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Hydrogen Nucleus API
The Hydrogen Nucleus API # noqa: E501
OpenAPI spec version: 1.9.5
Contact: info@hydrogenplatform.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import nucleus_api
from nucleus_api.models.page_score import PageScore # noqa: E501
from nucleus_api.rest import ApiException
class TestPageScore(unittest.TestCase):
    """PageScore unit test stubs"""
    def setUp(self):
        # No fixtures needed yet; kept for the generated-test skeleton.
        pass
    def tearDown(self):
        pass
    def testPageScore(self):
        """Test PageScore"""
        # FIXME: construct object with mandatory attributes with example values
        # model = nucleus_api.models.page_score.PageScore() # noqa: E501
        pass
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| 20.658537 | 79 | 0.689492 |
2cba71c458c1b04abc67fc25f3db44d4fc6a7ca9 | 1,496 | py | Python | Config.py | r00tmebaby/Computer-Networking-Assigment-Cleint-Server | e3ce25d1e154a17efb64d8d8c7a1215a678861e6 | [
"MIT"
] | null | null | null | Config.py | r00tmebaby/Computer-Networking-Assigment-Cleint-Server | e3ce25d1e154a17efb64d8d8c7a1215a678861e6 | [
"MIT"
] | null | null | null | Config.py | r00tmebaby/Computer-Networking-Assigment-Cleint-Server | e3ce25d1e154a17efb64d8d8c7a1215a678861e6 | [
"MIT"
] | null | null | null | ##############################################
# Config File : Computer Networking Assigment
# Author : Zdravko Georgiev
# License : MIT
# Github : https://github.com/r00tmebaby
# Copyright (c) 2019 / 31.03.2019
# Version : 0.1
###############################################
import base64
Cipher_Key = "BirkBeck!Computing!2019" # Key for the Vigenere-style shift in encrypt()/decrypt() below (obfuscation, not real cryptography)
Enc_Type = "utf-8" # Encoding standard, can be ascii etc. depending on the requirements
Separator = "[*&]D#]" # Used to distinguish the start and the end of each packet data
Server_Host = "127.0.0.1" # Server IP address, can be hostname as well
Server_Port = 8080 # Port number used by the server. Make sure that it is not in use.
Buffer_Size = 1024 # Receiver and sender socket buffer size. A large socket receiver buffer is essential to support high throughput.
def encrypt(clear):
    """Obfuscate *clear* with a Vigenere-style character shift keyed by Cipher_Key, then url-safe base64 encode."""
    key_len = len(Cipher_Key)
    shifted = [
        chr((ord(ch) + ord(Cipher_Key[idx % key_len])) % 256)
        for idx, ch in enumerate(clear)
    ]
    return base64.urlsafe_b64encode("".join(shifted).encode()).decode()
def decrypt(enc):
dec = []
enc = base64.urlsafe_b64decode(enc).decode()
for i in range(len(enc)):
key_c = Cipher_Key[i % len(Cipher_Key)]
dec_c = chr((256 + ord(enc[i]) - ord(key_c)) % 256)
dec.append(dec_c)
return "".join(dec)
| 39.368421 | 153 | 0.584225 |
297916326a291a4b534c9f1b99deb25849f1ebcd | 4,600 | py | Python | app/main.py | pwcazenave/tinypilot | 7e67d9092d7f6e9e49b6520c85b448470ba8b82b | [
"MIT"
] | null | null | null | app/main.py | pwcazenave/tinypilot | 7e67d9092d7f6e9e49b6520c85b448470ba8b82b | [
"MIT"
] | null | null | null | app/main.py | pwcazenave/tinypilot | 7e67d9092d7f6e9e49b6520c85b448470ba8b82b | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import logging
import os
import flask
import flask_socketio
import flask_wtf
import hid
import js_to_hid
import local_system
# Module-level application wiring: logging, environment-driven config, Flask app,
# Socket.IO transport and CSRF protection. Runs once at import time.
root_logger = logging.getLogger()
# NOTE(review): `handler`/`formatter` below are configured but never attached —
# only flask's default handler is added. Either dead code or a missing
# root_logger.addHandler(handler); confirm intent before changing.
handler = logging.StreamHandler()
formatter = logging.Formatter(
    '%(asctime)s %(name)-15s %(levelname)-4s %(message)s', '%Y-%m-%d %H:%M:%S')
handler.setFormatter(formatter)
root_logger.addHandler(flask.logging.default_handler)
root_logger.setLevel(logging.INFO)
logger = logging.getLogger(__name__)
logger.info('Starting app')
# Server bind address/port and reload behaviour, overridable via environment.
host = os.environ.get('HOST', '0.0.0.0')
port = int(os.environ.get('PORT', 8000))
debug = 'DEBUG' in os.environ
use_reloader = os.environ.get('USE_RELOADER', '1') == '1'
# Location of HID file handle in which to write keyboard HID input.
hid_path = os.environ.get('HID_PATH', '/dev/hidg0')
app = flask.Flask(__name__, static_url_path='')
# TODO(mtlynch): Ideally, we wouldn't accept requests from any origin, but the
# risk of a CSRF attack for this app is very low. Additionally, CORS doesn't
# protect us from the dangerous part of a CSRF attack. Even without same-origin
# enforcement, third-party pages can still *send* requests (i.e. inject
# keystrokes into the target machine) - it doesn't matter much if they can't
# read responses.
socketio = flask_socketio.SocketIO(app, cors_allowed_origins='*')
# Configure CSRF protection.
csrf = flask_wtf.csrf.CSRFProtect(app)
# Random secret per process: CSRF tokens/sessions do not survive restarts.
app.config['SECRET_KEY'] = os.urandom(32)
def _parse_key_event(payload):
    """Build a js_to_hid.JavaScriptKeyEvent from a browser key-event payload dict."""
    fields = {
        'meta_modifier': payload['metaKey'],
        'alt_modifier': payload['altKey'],
        'shift_modifier': payload['shiftKey'],
        'ctrl_modifier': payload['ctrlKey'],
        'key': payload['key'],
        'key_code': payload['keyCode'],
    }
    return js_to_hid.JavaScriptKeyEvent(**fields)
@socketio.on('keystroke')
def socket_keystroke(message):
    """Translate a browser keystroke event into a HID report and write it to the device."""
    key_event = _parse_key_event(message)

    def _report(success):
        # Acknowledge the keystroke back to the browser.
        socketio.emit('keystroke-received', {'success': success})

    try:
        control_keys, hid_keycode = js_to_hid.convert(key_event)
    except js_to_hid.UnrecognizedKeyCodeError:
        logger.warning('Unrecognized key: %s (keycode=%d)', key_event.key,
                       key_event.key_code)
        _report(False)
        return
    if hid_keycode is None:
        # Key is known but intentionally not forwarded.
        logger.info('Ignoring %s key (keycode=%d)', key_event.key,
                    key_event.key_code)
        _report(False)
        return
    try:
        hid.send(hid_path, control_keys, hid_keycode)
    except hid.WriteError as e:
        logger.error('Failed to write key: %s (keycode=%d). %s', key_event.key,
                     key_event.key_code, e)
        _report(False)
        return
    _report(True)
@socketio.on('keyRelease')
def socket_key_release():
    """Clear any held keys on the HID device when the browser reports a release."""
    try:
        hid.clear(hid_path)
    except hid.WriteError as error:
        logger.error('Failed to release keys: %s', error)
@socketio.on('connect')
def test_connect():
    """Log when a Socket.IO client establishes a connection."""
    logger.info('Client connected')
@socketio.on('disconnect')
def test_disconnect():
    """Log when a Socket.IO client drops its connection."""
    logger.info('Client disconnected')
@app.route('/', methods=['GET'])
def index_get():
    """Serve the single-page UI."""
    return flask.render_template('index.html')
@app.route('/shutdown', methods=['POST'])
def shutdown_post():
    """Trigger a host shutdown; respond with a JSON success/error payload."""
    try:
        local_system.shutdown()
    except local_system.Error as e:
        return flask.jsonify({
            'success': False,
            'error': str(e),
        }), 500
    return flask.jsonify({
        'success': True,
        'error': None,
    })
@app.route('/restart', methods=['POST'])
def restart_post():
    """Trigger a host restart; respond with a JSON success/error payload."""
    try:
        local_system.restart()
    except local_system.Error as e:
        return flask.jsonify({
            'success': False,
            'error': str(e),
        }), 500
    return flask.jsonify({
        'success': True,
        'error': None,
    })
@app.errorhandler(flask_wtf.csrf.CSRFError)
def handle_csrf_error(e):
    """Return CSRF validation failures as a 400 JSON error."""
    body = {
        'success': False,
        'error': e.description,
    }
    return flask.jsonify(body), 400
def main():
    """Start the Socket.IO web server using the environment-driven settings above."""
    # Files that trigger an automatic restart when the reloader is enabled.
    watched_files = [
        './app/templates/index.html',
        './app/static/js/app.js',
        './app/static/css/style.css',
    ]
    socketio.run(app,
                 host=host,
                 port=port,
                 debug=debug,
                 use_reloader=use_reloader,
                 extra_files=watched_files)
# Script entry point: run the server directly (not under an external WSGI runner).
if __name__ == '__main__':
    main()
| 29.487179 | 79 | 0.615 |
24df60998ac79f919f601398aa17f3e4393a3b27 | 6,228 | py | Python | bindings/python/cntk/losses/tests/cosine_distance_test.py | shyamalschandra/CNTK | 0e7a6cd4cc174eab28eaf2ffc660c6380b9e4e2d | [
"MIT"
] | 17,702 | 2016-01-25T14:03:01.000Z | 2019-05-06T09:23:41.000Z | bindings/python/cntk/losses/tests/cosine_distance_test.py | shyamalschandra/CNTK | 0e7a6cd4cc174eab28eaf2ffc660c6380b9e4e2d | [
"MIT"
] | 3,489 | 2016-01-25T13:32:09.000Z | 2019-05-03T11:29:15.000Z | bindings/python/cntk/losses/tests/cosine_distance_test.py | shyamalschandra/CNTK | 0e7a6cd4cc174eab28eaf2ffc660c6380b9e4e2d | [
"MIT"
] | 5,180 | 2016-01-25T14:02:12.000Z | 2019-05-06T04:24:28.000Z | # Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE.md file in the project root
# for full license information.
# ==============================================================================
"""
Unit tests for the cosine distance class.
"""
import numpy as np
import pytest
import cntk as C
def test_cosine_distance():
    """Cosine distance between a sequence input and a broadcast static input."""
    query = np.arange(25.0, dtype=np.float32).reshape(5, 5)
    target = np.arange(0, 5, dtype=np.float32).reshape(1, 5)

    src = C.sequence.input_variable(shape=(5), sequence_axis=C.Axis("Seq"))
    tgt = C.input_variable(shape=(5))
    model = C.cosine_distance(src, C.sequence.broadcast_as(tgt, src))

    # The result keeps both the batch axis and the named sequence axis.
    assert len(model.dynamic_axes) == 2
    assert model.dynamic_axes[1].name == "Seq"

    actual = model.eval({src: [query], tgt: [target]})
    expected = [[1., 0.914659, 0.878459, 0.86155, 0.851852]]
    assert np.allclose(actual, expected)
def test_cosine_distance_with_negative_samples():
    """Cosine distance against shifted negative samples on a cyclic one-hot-pair fixture."""
    a = np.array([[1., 1., 0., 0., 0.],
                  [0., 1., 1., 0., 0.],
                  [0., 0., 1., 1., 0.],
                  [0., 0., 0., 1., 1.],
                  [1., 0., 0., 0., 1.]], dtype=np.float32)
    b = np.array([[1., 1., 0., 0., 0.],
                  [0., 1., 1., 0., 0.],
                  [0., 0., 1., 1., 0.],
                  [0., 0., 0., 1., 1.],
                  [1., 0., 0., 0., 1.]], dtype=np.float32)
    qry = C.sequence.input_variable(shape=(5))
    doc = C.sequence.input_variable(shape=(5))
    num_neg_samples = 2
    model = C.cosine_distance_with_negative_samples(qry, doc, shift=1,
                                                    num_negative_samples=num_neg_samples)
    result = model.eval({qry: [a], doc: [b]})

    # BUG FIX: the original called np.allclose without asserting its result,
    # so none of these checks could ever fail.
    # We expect 1 row per minibatch
    assert np.allclose(len(result), a.shape[0])
    # We expect the number of columns to be number of negative samples + 1
    assert np.allclose(result[0].shape[1], num_neg_samples + 1)
    # The first value is exact match, second only 1 element match and last one is 0 match
    assert np.allclose(result[0], np.tile([1, 0.5, 0.], (a.shape[0], 1)))
def test_cosine_distance_with_negative_samples_with_reduced_sequence():
    """The op should accept operands whose sequence axis has been reduced away."""
    a = C.sequence.input_variable((3,), sequence_axis=C.Axis("a"))
    b = C.sequence.input_variable((3,), sequence_axis=C.Axis("b"))
    first_a = C.sequence.first(a)
    first_b = C.sequence.first(b)
    model = C.cosine_distance_with_negative_samples(first_a, first_b, 1, 2)
    batch = np.random.random((4, 3)).astype(np.float32)
    model.eval({a: batch, b: batch})
def test_rank0_output():
    """Smoke test: forward/backward over several minibatches of large sequences."""
    x = C.sequence.input_variable(shape=(768,), sequence_axis=C.Axis("B"), needs_gradient=True)
    y = C.sequence.input_variable(shape=(768,), sequence_axis=C.Axis("B"), needs_gradient=True)
    model = C.cosine_distance(x, y)

    batch_num = 2
    batch_size = 30
    x_data = np.float32(np.random.rand(batch_num * batch_size, 1500, 768))
    y_data = np.float32(np.random.rand(batch_num * batch_size, 1500, 768))

    for i in range(batch_num):
        start, stop = i * batch_size, (i + 1) * batch_size
        state, outputs = model.forward({x: x_data[start:stop], y: y_data[start:stop]},
                                       [model.output], {model.output})
        model.backward(state, {model.output: np.ones_like(outputs[model.output])}, {x, y})
class numpy_cos:
    """NumPy reference implementation of cosine similarity over the last axis,
    with analytic gradients, used to validate the CNTK op."""

    def __init__(self, a, b):
        self.a = a
        self.b = b

    def forward(self):
        """Compute cosine similarity along the last axis; caches intermediates for backward()."""
        lhs, rhs = self.a, self.b
        self.dot = np.sum(lhs * rhs, -1)
        # 1e-9 guards against division by zero for all-zero rows.
        self.a_sqrt = np.sqrt(np.sum(np.square(lhs), -1) + 1e-9)
        self.b_sqrt = np.sqrt(np.sum(np.square(rhs), -1) + 1e-9)
        self.sim = self.dot / (self.a_sqrt * self.b_sqrt)
        return self.sim

    def backward(self):
        """Return {'a': dsim/da, 'b': dsim/db}. Must be called after forward()."""
        # Append a trailing singleton axis so the cached reductions broadcast
        # against the full-rank inputs.
        self.a_sqrt = np.reshape(self.a_sqrt, self.a_sqrt.shape + (1,))
        self.b_sqrt = np.reshape(self.b_sqrt, self.b_sqrt.shape + (1,))
        self.sim = np.reshape(self.sim, self.sim.shape + (1,))
        grad_a = self.b / (self.a_sqrt * self.b_sqrt) - self.sim * self.a / np.square(self.a_sqrt)
        grad_b = self.a / (self.a_sqrt * self.b_sqrt) - self.sim * self.b / np.square(self.b_sqrt)
        return {'a': grad_a, 'b': grad_b}
def test_cos_distane_backward():
    """Forward value and input gradients of cosine distance on a tiny fixture."""
    x = C.sequence.input_variable(shape=(2,), sequence_axis=C.Axis("B"), needs_gradient=True)
    y = C.sequence.input_variable(shape=(2,), sequence_axis=C.Axis("B"), needs_gradient=True)
    model = C.cosine_distance(x, y)

    x_data = np.reshape(np.float32([0.25, 0.5, 0.1, 1]), (1, 2, 2))
    y_data = np.reshape(np.float32([-0.5, 1.5, -0.3, -1]), (1, 2, 2))

    state, outputs = model.forward({x: x_data, y: y_data}, [model.output], {model.output})
    actual = list(outputs.values())[0]
    assert np.allclose(actual, [[0.707107, -0.981665]])

    grad = model.backward(state, {model.output: np.ones_like(actual)}, {x, y})
    grad_x_expected = np.ndarray((1, 2, 2), dtype=np.float32,
                                 buffer=np.float32([-1.131371, 0.565686, -0.188727, 0.018873]))
    grad_y_expected = np.ndarray((1, 2, 2), dtype=np.float32,
                                 buffer=np.float32([0.424264, 0.141421, -0.174876, 0.052463]))
    assert np.all(np.absolute(grad[x] - grad_x_expected) < 1e-6)
    assert np.all(np.absolute(grad[y] - grad_y_expected) < 1e-6)
def test_cos_distane_backward2():
    """Compare CNTK cosine-distance forward/backward against the NumPy reference."""
    x = C.sequence.input_variable(shape=(100,), sequence_axis=C.Axis("B"), needs_gradient=True)
    y = C.sequence.input_variable(shape=(100,), sequence_axis=C.Axis("B"), needs_gradient=True)
    model = C.cosine_distance(x, y)

    np.random.seed(0)
    x_data = np.float32(np.random.rand(10, 50, 100))
    y_data = np.float32(np.random.rand(10, 50, 100))

    state, outputs = model.forward({x: x_data, y: y_data}, [model.output], {model.output})
    actual = list(outputs.values())[0]

    reference = numpy_cos(x_data, y_data)
    assert np.allclose(actual, reference.forward())

    grad = model.backward(state, {model.output: np.ones_like(actual)}, {x, y})
    expected_grads = reference.backward()
    assert np.all(np.absolute(grad[x] - expected_grads['a']) < 1e-6)
    assert np.all(np.absolute(grad[y] - expected_grads['b']) < 1e-6)
def test_cos_distane_backward3():
    """Gradient when both operands are the same variable: contributions must sum."""
    x = C.sequence.input_variable(shape=(100,), sequence_axis=C.Axis("B"), needs_gradient=True)
    model = C.cosine_distance(x, x)

    np.random.seed(0)
    data = np.float32(np.random.rand(10, 50, 100))

    state, outputs = model.forward({x: data}, [model.output], {model.output})
    actual = list(outputs.values())[0]

    reference = numpy_cos(data, data)
    assert np.allclose(actual, reference.forward())

    grad = model.backward(state, {model.output: np.ones_like(actual)}, {x})
    expected_grads = reference.backward()
    assert np.all(np.absolute(grad[x] - (expected_grads['a'] + expected_grads['b'])) < 1e-6)
| 42.367347 | 127 | 0.6421 |
ea781c2202ae703722d14ff9291ebe9fee154b0e | 316 | py | Python | aiozk/features.py | nonsleepr/aiozk | 6b4b32a6989eed6a9e568819ab61c89a110fbbec | [
"MIT"
] | 22 | 2019-08-21T11:39:21.000Z | 2021-08-18T11:21:23.000Z | aiozk/features.py | nonsleepr/aiozk | 6b4b32a6989eed6a9e568819ab61c89a110fbbec | [
"MIT"
] | 44 | 2019-08-12T08:39:57.000Z | 2021-10-08T19:38:46.000Z | aiozk/features.py | nonsleepr/aiozk | 6b4b32a6989eed6a9e568819ab61c89a110fbbec | [
"MIT"
ALL_FEATURES = {
    "create_with_stat": (3, 5, 0),
    "containers": (3, 5, 1),
    "reconfigure": (3, 5, 0),
}


class Features:
    """Feature availability flags derived from a ZooKeeper server version.

    For every entry in ALL_FEATURES, an attribute of the same name is set to
    True when ``version_info`` (a comparable (major, minor, patch) tuple) is at
    least the version that introduced the feature.
    """
    # NOTE: the first line of this statement was fused with dataset-extraction
    # metadata in the original; the assignment has been reconstructed verbatim.

    def __init__(self, version_info):
        for feature_name, version_introduced in ALL_FEATURES.items():
            setattr(self, feature_name, version_info >= version_introduced)
| 24.307692 | 75 | 0.642405 |
99f7ebcc7b61b0b715fc254cc3cd6b2febe5d085 | 1,554 | py | Python | app/db/session.py | qtdemo1/ibm-ops | 29f3d4ba406a1c39a007468977784d6c39f056bb | [
"Apache-2.0"
] | null | null | null | app/db/session.py | qtdemo1/ibm-ops | 29f3d4ba406a1c39a007468977784d6c39f056bb | [
"Apache-2.0"
] | null | null | null | app/db/session.py | qtdemo1/ibm-ops | 29f3d4ba406a1c39a007468977784d6c39f056bb | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
#
# Copyright 2020 IBM
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.IBM Confidential
#
import logging
import typing
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from app.core.configuration import get_config
logger = logging.getLogger(__name__)
def get_db_url() -> str:
    """Return the database URL, preferring a local SQLite file when configured."""
    if not get_config().USE_SQLITE:
        logger.info('Using customized database')
        return get_config().DB_URL
    logger.info('Using SQLITE database')
    db_file = get_config().MODEL_STORAGE.joinpath(get_config().DATABASE_NAME)
    return f'sqlite:///{db_file}'
def get_db_opts() -> typing.Dict[str, typing.Any]:
    """Return create_engine keyword arguments appropriate for the configured backend."""
    if get_config().USE_SQLITE:
        # SQLite connections are single-thread by default; relax that for the app.
        return {'connect_args': {"check_same_thread": False}}
    extra_args = get_config().DB_ARGS
    return extra_args if extra_args is not None else {}
def get_engine():
    """Create a SQLAlchemy engine from the configured URL and backend options."""
    return create_engine(get_db_url(), **get_db_opts())
# Module-level session factory; the engine is created (and the db connected)
# once, at import time. Sessions require explicit commit/flush.
SessionLocal = sessionmaker(
    autocommit=False,
    autoflush=False,
    bind=get_engine()
)
| 27.75 | 93 | 0.720077 |
321ca26b9eb6d5ab09e5cd730cc989ad8c67116c | 663 | py | Python | src/your_project_name/pipeline_registry.py | corradin-lab/analysis-project-template | 5bb43846a24a84222f4fc816e42b1291b4eb783d | [
"Apache-2.0"
] | null | null | null | src/your_project_name/pipeline_registry.py | corradin-lab/analysis-project-template | 5bb43846a24a84222f4fc816e42b1291b4eb783d | [
"Apache-2.0"
] | 1 | 2022-01-27T05:05:12.000Z | 2022-01-27T17:14:29.000Z | src/your_project_name/pipeline_registry.py | corradin-lab/analysis-project-template | 5bb43846a24a84222f4fc816e42b1291b4eb783d | [
"Apache-2.0"
] | null | null | null | """Project pipelines."""
from typing import Dict
from kedro.pipeline import Pipeline
from your_project_name.pipelines import data_engineering as de
from your_project_name.pipelines import data_science as ds
def register_pipelines() -> Dict[str, Pipeline]:
    """Register the project's pipelines.

    Returns:
        A mapping from a pipeline name to a ``Pipeline`` object.
    """
    de_pipeline = de.create_pipeline()
    ds_pipeline = ds.create_pipeline()
    pipelines = {
        "de": de_pipeline,
        "ds": ds_pipeline,
    }
    # The default pipeline runs data engineering followed by data science.
    pipelines["__default__"] = de_pipeline + ds_pipeline
    return pipelines
| 26.52 | 73 | 0.72549 |
abff4906381eb6dc4739a1bb0fea6a3d1defb8f0 | 636 | py | Python | diquencer/events.py | antiero/diquencer | 26a5f5a494a17642ab389dd49f31414792626ae2 | [
"MIT"
] | 16 | 2019-08-06T12:28:27.000Z | 2022-02-10T12:10:34.000Z | diquencer/events.py | antiero/diquencer | 26a5f5a494a17642ab389dd49f31414792626ae2 | [
"MIT"
] | null | null | null | diquencer/events.py | antiero/diquencer | 26a5f5a494a17642ab389dd49f31414792626ae2 | [
"MIT"
] | 2 | 2020-01-26T17:15:26.000Z | 2021-08-23T15:12:27.000Z | from .models import Position
class SequenceEvent:
    """Base class for events anchored to a pulse position in the sequence."""

    def __init__(self, pulsestamp):
        self.pulsestamp = pulsestamp

    def __str__(self):
        position = Position(self.pulsestamp)
        return f"{self.__class__.__name__} @ {position}"
class MuteEvent(SequenceEvent):
    """Event carrying the set of tracks that keep playing at this position."""

    def __init__(self, pulsestamp, playing_tracks):
        self.playing_tracks = playing_tracks
        super().__init__(pulsestamp)
class PatternEvent(SequenceEvent):
    """Event that schedules a pattern for a given number of repetitions."""

    def __init__(self, pulsestamp, pattern, repetitions):
        self.pattern = pattern
        self.repetitions = repetitions
        super().__init__(pulsestamp)
class StopEvent(SequenceEvent):
    """Marks the point at which playback should stop."""
| 23.555556 | 73 | 0.70283 |
85ec23ebe6adc45a2fb448c0915c1ee8eaa52528 | 12,353 | py | Python | zvdata/domain.py | bennysuh/zvt | d982af7e40178c6643315ab03ef6799e70078764 | [
"MIT"
] | null | null | null | zvdata/domain.py | bennysuh/zvt | d982af7e40178c6643315ab03ef6799e70078764 | [
"MIT"
] | null | null | null | zvdata/domain.py | bennysuh/zvt | d982af7e40178c6643315ab03ef6799e70078764 | [
"MIT"
] | 1 | 2020-05-16T09:42:02.000Z | 2020-05-16T09:42:02.000Z | # -*- coding: utf-8 -*-
import logging
import os
from typing import List
from sqlalchemy import create_engine, schema, Column, String, DateTime
from sqlalchemy.engine import Engine
from sqlalchemy.ext.declarative import DeclarativeMeta, declarative_base
from sqlalchemy.orm import sessionmaker, Session
from zvdata.structs import EntityMixin
logger = logging.getLogger(__name__)
# Engine/session caches, keyed by '{provider}_{db_name}'.
# provider_dbname -> engine
_db_engine_map = {}
# provider_dbname -> session
_db_session_map = {}
# Registries populated by register_schema() / register_entity().
global_providers = []
global_entity_types = []
global_schemas = []
# provider -> [db_name1,db_name2...]
provider_map_dbnames = {
}
# db_name -> declarative_base (a single base per database; see register_schema)
dbname_map_base = {
}
# db_name -> [declarative_meta1,declarative_meta2...]
dbname_map_schemas = {
}
# entity_type -> schema
entity_type_map_schema = {
}
# Runtime settings filled in by init_context() (data_path, ui_path, ...).
context = {}
# Declarative base for the built-in business tables (see FactorDomain).
BusinessBase = declarative_base()
class FactorDomain(BusinessBase):
    """Persistence model for factor results: one row per (factor, entity, timestamp)."""
    __tablename__ = 'factor_domain'
    # Composite primary key: factor id + entity id + time point.
    factor_id = Column(String(length=128), primary_key=True)
    entity_id = Column(String(length=128), primary_key=True)
    timestamp = Column(DateTime, primary_key=True)
    # Payload columns; presumably serialized (string-encoded) factor input and
    # output data — confirm the encoding against the factor writer.
    depth_data = Column(String(length=1024))
    result_data = Column(String(length=1024))
def init_factor_schema():
    """Register the built-in 'core' database schema under the 'zvdata' provider."""
    register_schema(providers=['zvdata'], db_name='core', schema_base=BusinessBase)
def init_context(data_path: str, ui_path: str, domain_module: str, register_api: bool = False) -> None:
    """
    now we just support sqlite engine for storing the data,you need to set the path for the db
    :param data_path: the db file path
    :type data_path:
    :param ui_path: the path for storing render html
    :type ui_path:
    :param domain_module: the module name of your domains
    :type domain_module:
    :param register_api: whether register the api
    :type register_api:
    """
    context['data_path'] = data_path
    context['ui_path'] = ui_path
    context['domain_module'] = domain_module
    context['register_api'] = register_api
    # exist_ok avoids the TOCTOU race the previous exists()-then-makedirs had.
    os.makedirs(data_path, exist_ok=True)
    os.makedirs(ui_path, exist_ok=True)
    init_factor_schema()
def table_name_to_domain_name(table_name: str) -> str:
    """
    Convert a snake_case table name to its CamelCase domain-class name,
    e.g. 'stock_day_kdata' -> 'StockDayKdata'.

    (BUG FIX: the return annotation previously claimed DeclarativeMeta,
    but a plain str name is what is produced.)

    :param table_name: snake_case table name
    :return: the CamelCase class name
    """
    # str.capitalize upper-cases the first character and lower-cases the rest
    # of each underscore-separated part.
    return ''.join(part.capitalize() for part in table_name.split('_'))
def domain_name_to_table_name(domain_name: str) -> str:
    """Convert a CamelCase domain-class name to its snake_case table name.

    An upper-case letter or a digit starts a new underscore-separated segment,
    e.g. 'StockDayKdata' -> 'stock_day_kdata'.
    """
    segments = []
    current = ''
    for char in domain_name:
        if char.isupper() or char.isdigit():
            if current:
                segments.append(current)
            current = char.lower()
        else:
            current += char
    segments.append(current)

    if len(segments) > 1:
        return '_'.join(segments)
    if segments:
        return segments[0]
def enum_value(x):
    """Map an iterable of enum members to the list of their .value attributes."""
    return list(map(lambda member: member.value, x))
def get_db_name(data_schema: DeclarativeMeta) -> str:
    """Return the database name whose registered declarative base owns data_schema.

    Returns None implicitly when the schema belongs to no registered database.
    """
    for name, base_cls in dbname_map_base.items():
        if issubclass(data_schema, base_cls):
            return name
def get_db_engine(provider: str,
                  db_name: str = None,
                  data_schema: object = None) -> Engine:
    """Return (creating and caching on first use) the SQLite engine for
    (provider, db_name); when data_schema is given, db_name is derived from it.
    """
    if data_schema:
        db_name = get_db_name(data_schema=data_schema)

    engine_key = '{}_{}'.format(provider, db_name)
    engine = _db_engine_map.get(engine_key)
    if engine is None:
        db_path = os.path.join(context['data_path'], '{}_{}.db'.format(provider, db_name))
        engine = create_engine('sqlite:///' + db_path, echo=False)
        _db_engine_map[engine_key] = engine
    return engine
def get_db_session(provider: str,
                   db_name: str = None,
                   data_schema: object = None) -> Session:
    """Open a new Session for (provider, db_name) or (provider, data_schema)."""
    factory = get_db_session_factory(provider, db_name, data_schema)
    return factory()
def get_db_session_factory(provider: str,
                           db_name: str = None,
                           data_schema: object = None):
    """Return (creating and caching on first use) the sessionmaker for
    (provider, db_name); when data_schema is given, db_name is derived from it.
    """
    if data_schema:
        db_name = get_db_name(data_schema=data_schema)

    session_key = '{}_{}'.format(provider, db_name)
    factory = _db_session_map.get(session_key)
    if factory is None:
        factory = sessionmaker()
        _db_session_map[session_key] = factory
    return factory
def get_schemas(provider: str) -> List[DeclarativeMeta]:
    """Return every domain schema registered under any of the provider's databases."""
    collected = []
    for db_name in provider_map_dbnames.get(provider, []):
        db_schemas = dbname_map_schemas.get(db_name)
        if db_schemas:
            collected.extend(db_schemas)
    return collected
def get_schema_by_name(name: str) -> DeclarativeMeta:
    """Look up a registered domain schema by its class name; None when absent."""
    return next((candidate for candidate in global_schemas
                 if candidate.__name__ == name), None)
def get_schema_columns(schema: DeclarativeMeta) -> object:
    """Return the column names of the given domain schema's table."""
    table = schema.__table__
    return table.columns.keys()
api_header = '''
# -*- coding: utf-8 -*-
# this file is generated by register_api function, dont't change it
from typing import List, Union
import pandas as pd
from sqlalchemy.orm import Session
from zvdata.api import get_data
from zvdata.structs import IntervalLevel
'''
api_template = '''
{}
def get_{}(
entity_ids: List[str] = None,
entity_id: str = None,
codes: List[str] = None,
level: Union[IntervalLevel, str] = None,
provider: str = \'{}\',
columns: List = None,
return_type: str = 'df',
start_timestamp: Union[pd.Timestamp, str] = None,
end_timestamp: Union[pd.Timestamp, str] = None,
filters: List = None,
session: Session = None,
order=None,
limit: int = None,
index: str = 'timestamp',
index_is_time: bool = True,
time_field: str = 'timestamp'):
return get_data(data_schema={}, entity_ids=entity_ids, entity_id=entity_id, codes=codes, level=level,
provider=provider,
columns=columns, return_type=return_type, start_timestamp=start_timestamp,
end_timestamp=end_timestamp, filters=filters, session=session, order=order, limit=limit,
index=index, index_is_time=index_is_time, time_field=time_field)
'''
def register_api(provider: str, api_dir: str = '.') -> object:
    """Decorator factory that emits a `<tablename>.api` accessor fragment for a
    domain class, when api registration was enabled via init_context().

    :param provider: default data provider baked into the generated accessor
    :param api_dir: directory that receives the generated fragment
    """
    def decorate(cls):
        if context['register_api']:
            import_line = 'from {} import {}'.format(context['domain_module'], cls.__name__)
            api_source = api_template.format(import_line, cls.__tablename__, provider, cls.__name__)
            fragment_path = os.path.join(api_dir, f'{cls.__tablename__}.api')
            with open(fragment_path, "w") as fragment_file:
                fragment_file.write(api_source + '\n')
        return cls
    return decorate
def generate_api(api_path: str, tmp_api_dir: str) -> None:
    """
    Assemble the final api.py for the register_api domains.

    Writes api_header followed by the content of every *.api fragment found in
    tmp_api_dir into <api_path>/api.py, deleting each fragment afterwards.

    :param api_path: directory where api.py is written
    :param tmp_api_dir: directory containing the generated .api fragments
    """
    from os import listdir
    from os.path import isfile, join
    api_files = [os.path.join(tmp_api_dir, f) for f in listdir(tmp_api_dir) if
                 isfile(join(tmp_api_dir, f)) and f.endswith('.api')]
    with open(os.path.join(api_path, 'api.py'), 'w') as outfile:
        outfile.write(api_header)
        for api_file in api_files:
            with open(api_file) as infile:
                # BUG FIX: removed a stray debug print() that emitted a blank
                # line to stdout on every fragment.
                outfile.write(infile.read())
            os.remove(api_file)
def register_entity(entity_type: str = None):
    """Decorator factory that registers an EntityMixin subclass as an entity type.

    :param entity_type: explicit type name; defaults to the lower-cased class name
    """
    def register(cls):
        if issubclass(cls, EntityMixin):
            resolved_type = entity_type if entity_type else cls.__name__.lower()
            if resolved_type not in global_entity_types:
                global_entity_types.append(resolved_type)
            entity_type_map_schema[resolved_type] = cls
        return cls
    return register
def register_schema(providers: List[str],
                    db_name: str,
                    schema_base: DeclarativeMeta):
    """
    Register every declarative class under schema_base for the given providers,
    create the backing database/tables, and add the conventional indexes.

    :param providers: the supported providers for the schema
    :param db_name: database name for the schema
    :param schema_base: declarative base whose registry holds the schemas
    """
    # Collect the declarative classes into the per-db schema registry.
    schemas = []
    for item in schema_base._decl_class_registry.items():
        cls = item[1]
        if type(cls) == DeclarativeMeta:
            if dbname_map_schemas.get(db_name):
                schemas = dbname_map_schemas[db_name]
            global_schemas.append(cls)
            schemas.append(cls)
            dbname_map_schemas[db_name] = schemas
    for provider in providers:
        # track in in _providers
        if provider not in global_providers:
            global_providers.append(provider)
        if not provider_map_dbnames.get(provider):
            provider_map_dbnames[provider] = []
        provider_map_dbnames[provider].append(db_name)
        dbname_map_base[db_name] = schema_base
        # create the db & table
        engine = get_db_engine(provider, db_name=db_name)
        schema_base.metadata.create_all(engine)
        session_fac = get_db_session_factory(provider, db_name=db_name)
        session_fac.configure(bind=engine)
    for provider in providers:
        engine = get_db_engine(provider, db_name=db_name)
        # create index for 'timestamp','entity_id','code','report_period','updated_timestamp
        for table_name, table in iter(schema_base.metadata.tables.items()):
            index_list = []
            with engine.connect() as con:
                rs = con.execute("PRAGMA INDEX_LIST('{}')".format(table_name))
                for row in rs:
                    index_list.append(row[1])
            logger.debug('engine:{},table:{},index:{}'.format(engine, table_name, index_list))
            for col in ['timestamp', 'entity_id', 'code', 'report_period', 'created_timestamp', 'updated_timestamp']:
                if col in table.c:
                    # getattr replaces the previous eval-based column lookup.
                    column = getattr(table.c, col)
                    index = schema.Index('{}_{}_index'.format(table_name, col), column)
                    if index.name not in index_list:
                        index.create(engine)
            for cols in [('timestamp', 'entity_id'), ('timestamp', 'code')]:
                # BUG FIX: the original referenced `col` (the leftover loop
                # variable above) instead of `cols`, so composite indexes were
                # checked and built from the wrong columns.
                if (cols[0] in table.c) and (cols[1] in table.c):
                    column0 = getattr(table.c, cols[0])
                    column1 = getattr(table.c, cols[1])
                    index = schema.Index('{}_{}_{}_index'.format(table_name, cols[0], cols[1]), column0, column1)
                    if index.name not in index_list:
                        index.create(engine)
| 28.138952 | 117 | 0.623816 |
1fa755d10f0c225a6ec19f83e951859cd0773f0c | 8,565 | py | Python | recipes/o3tanks/globals/o3tanks.py | loherangrin/o3tanks | dc7d7871bc06096905563f91ad996d3287e48a7e | [
"Apache-2.0"
] | 13 | 2021-08-17T22:50:52.000Z | 2022-01-13T18:08:22.000Z | recipes/o3tanks/globals/o3tanks.py | loherangrin/o3tanks | dc7d7871bc06096905563f91ad996d3287e48a7e | [
"Apache-2.0"
] | 26 | 2021-08-17T18:42:44.000Z | 2022-02-15T23:11:17.000Z | recipes/o3tanks/globals/o3tanks.py | loherangrin/o3tanks | dc7d7871bc06096905563f91ad996d3287e48a7e | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 Matteo Grasso
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ..utils.types import JsonPropertyKey, LinuxOSNames, ObjectEnum, OperatingSystem, OSFamilies, User
import os
import pathlib
import platform
import typing
# --- TYPES ---
# Sub-commands understood by the "builder" entry point.
class BuilderCommands(ObjectEnum):
    BUILD = "build"
    CLEAN = "clean"
    INIT = "init"
    SETTINGS = "settings"
# Full command surface of the user-facing CLI; some values are shared with the
# per-role command enums (BuilderCommands above, Updater/RunnerCommands below).
class CliCommands(ObjectEnum):
    INSTALL = "install"
    LIST = "list"
    REFRESH = "refresh"
    UNINSTALL = "uninstall"
    UPGRADE = "upgrade"
    ADD = "add"
    BUILD = BuilderCommands.BUILD.value
    CLEAN = BuilderCommands.CLEAN.value
    INIT = BuilderCommands.INIT.value
    OPEN = "open"
    REMOVE = "remove"
    RUN = "run"
    SETTINGS = BuilderCommands.SETTINGS.value
    HELP = "help"
    INFO = "info"
    VERSION = "version"
# Targets a CLI command can operate on; SELF accepts two spellings.
class CliSubCommands(ObjectEnum):
    ENGINE = "engine"
    GEM = "gem"
    PROJECT = "project"
    SELF = [ "self", "o3tanks" ]
# Sub-commands understood by the "updater" entry point.
class UpdaterCommands(ObjectEnum):
    INIT = CliCommands.INIT.value
    REFRESH = CliCommands.REFRESH.value
    UPGRADE = CliCommands.UPGRADE.value
# Sub-commands understood by the "runner" entry point.
class RunnerCommands(ObjectEnum):
    OPEN = CliCommands.OPEN.value
    RUN = CliCommands.RUN.value
# GPU driver identifiers (matched against O3TANKS_GPU via init_from_env below).
# NOTE(review): the trailing commas make AMD_OPEN, INTEL and NVIDIA_OPEN
# 1-tuples ("amdgpu",) while AMD_PROPRIETARY/NVIDIA_PROPRIETARY are plain
# strings — the inconsistency looks unintended; confirm against ObjectEnum's
# value handling before changing.
class GPUDrivers(ObjectEnum):
    AMD_OPEN = "amdgpu",
    AMD_PROPRIETARY = "amdgpu-pro"
    INTEL = "i915",
    NVIDIA_OPEN = "nouveau",
    NVIDIA_PROPRIETARY = "nvidia"
# Ways a gem dependency can be identified.
class GemReferenceTypes(ObjectEnum):
    ENGINE = "engine"
    PATH = "path"
    VERSION = "version"
# A gem reference: its addressing mode plus the mode-specific value
# (engine id, filesystem path, or version id per print() below).
class GemReference(typing.NamedTuple):
    type: GemReferenceTypes
    value: typing.Any
    def print(self):
        # Human-readable form: "engine/<value>", the path as a string,
        # or the raw value for version references.
        if self.type is GemReferenceTypes.ENGINE:
            return "{}/{}".format(GemReferenceTypes.ENGINE.value, self.value)
        elif self.type is GemReferenceTypes.PATH:
            return str(self.value)
        else:
            return self.value
# Image roles built and used by o3tanks.
class Images(ObjectEnum):
    BUILDER = "builder"
    CLI = "cli"
    INSTALL_BUILDER = "install-builder"
    INSTALL_RUNNER = "install-runner"
    RUNNER = "runner"
    UPDATER = "updater"
# Long (--name) command-line option names; HELP/VERSION reuse the CLI command
# spellings, and the WORKFLOW_* values name the supported workflow modes.
class LongOptions(ObjectEnum):
    ALIAS = "as"
    BRANCH = "branch"
    CLEAR = "clear"
    COMMIT = "commit"
    CONFIG = "config"
    ENGINE = "engine"
    FORCE = "force"
    FORK = "fork"
    HELP = CliCommands.HELP.value
    INCREMENTAL = "incremental"
    MINIMAL_PROJECT = "minimal"
    PATH = "path"
    PROJECT = "project"
    QUIET = "quiet"
    SKIP_EXAMPLES = "no-project"
    SKIP_REBUILD = "no-rebuild"
    REMOVE_BUILD = "remove-build"
    REMOVE_INSTALL = "remove-install"
    REPOSITORY = "repository"
    SAVE_IMAGES = "save-images"
    TAG = "tag"
    TYPE = "type"
    VERBOSE = "verbose"
    VERSION = CliCommands.VERSION.value
    WORKFLOW = "workflow"
    WORKFLOW_ENGINE = "engine-centric"
    WORKFLOW_PROJECT = "project-centric/engine-source"
    WORKFLOW_SDK = "project-centric/engine-prebuilt"
# Single-letter (-x) command-line option names, mirroring a subset of LongOptions.
class ShortOptions(ObjectEnum):
    CONFIG = 'c'
    ENGINE = 'e'
    FORCE = 'f'
    HELP = 'h'
    PROJECT = 'p'
    QUIET = 'q'
    VERBOSE = 'v'
    WORKFLOW = 'w'
# Top-level sections of the project settings document.
class Settings(ObjectEnum):
    ENGINE = "engine"
    GEMS = "gems"
# JSON property keys for the engine section (no array index).
class EngineSettings(ObjectEnum):
    VERSION = JsonPropertyKey(Settings.ENGINE.value, None, "id")
    REPOSITORY = JsonPropertyKey(Settings.ENGINE.value, None, "repository")
    BRANCH = JsonPropertyKey(Settings.ENGINE.value, None, "branch")
    REVISION = JsonPropertyKey(Settings.ENGINE.value, None, "revision")
    WORKFLOW = JsonPropertyKey(Settings.ENGINE.value, None, "workflow")
# JSON property keys for gem entries. NOTE(review): the -1 index presumably
# marks "an element of the gems array" — confirm against JsonPropertyKey usage.
class GemSettings(ObjectEnum):
    VERSION = JsonPropertyKey(Settings.GEMS.value, -1, "id")
    REPOSITORY = JsonPropertyKey(Settings.GEMS.value, -1, "repository")
    BRANCH = JsonPropertyKey(Settings.GEMS.value, -1, "branch")
    REVISION = JsonPropertyKey(Settings.GEMS.value, -1, "revision")
    ABSOLUTE_PATH = JsonPropertyKey(Settings.GEMS.value, -1, "absolute_path")
    RELATIVE_PATH = JsonPropertyKey(Settings.GEMS.value, -1, "relative_path")
# Object kinds the CLI commands can act upon.
class Targets(ObjectEnum):
    ENGINE = "engine"
    GEM = "gem"
    PROJECT = "project"
    SELF = "self"
# Logical storage areas associated with an engine installation.
class Volumes(ObjectEnum):
    GEMS = "gems"
    BUILD = "build"
    INSTALL = "install"
    PACKAGES = "packages"
    SOURCE = "source"
# --- FUNCTIONS ---
def init_from_env(env_name, env_type, default_value):
    """Read an environment variable and coerce it to env_type, falling back to
    default_value when the variable is unset."""
    raw = os.environ.get(env_name)
    if raw is None:
        return default_value

    if env_type is bool:
        # Accept "1", "on" and "true" (any casing) as truthy.
        return raw.lower() in ["1", "on", "true"]
    return env_type(raw)
def get_os():
    """Detect the host operating system as an OperatingSystem tuple.

    On Linux with containers enabled, the distribution name/version can be
    overridden through O3TANKS_CONTAINER_OS ("<name>[:<version>]").
    """
    os_family_name = platform.system()
    if os_family_name == "Linux":
        os_family = OSFamilies.LINUX
        if RUN_CONTAINERS:
            env_value = os.environ.get("O3TANKS_CONTAINER_OS")
            if env_value is not None:
                delimiter = ':'
                if delimiter in env_value:
                    substring_1, substring_2 = env_value.split(delimiter, 1)
                else:
                    substring_1 = env_value
                    substring_2 = ''
                os_name = LinuxOSNames.from_value(substring_1)
                # empty or "latest" means: no pinned distribution version
                os_version = substring_2 if (len(substring_2) > 0 and substring_2 != "latest") else None
            else:
                # default container image when no override is provided
                os_name = LinuxOSNames.UBUNTU
                os_version = "20.04"
        else:
            os_name = None
            os_version = None
    elif os_family_name == "Darwin":
        os_family = OSFamilies.MAC
        os_name = None
        os_version = None
    elif os_family_name == "Windows":
        os_family = OSFamilies.WINDOWS
        os_name = None
        os_version = None
    else:
        # unrecognized platform
        os_family = None
        os_name = None
        os_version = None
    return OperatingSystem(os_family, os_name, os_version)


def get_default_root_dir():
    # inside containers a concrete path object is needed; on the host the
    # value is only passed around, so a pure (non-IO) path suffices
    path = "/home/{}/o3tanks".format(USER_NAME)
    return (pathlib.PosixPath(path) if RUN_CONTAINERS else pathlib.PurePosixPath(path))


def get_default_data_dir(operating_system):
    """Return the per-user data directory for o3tanks, or None when running
    in containers or on an unsupported OS family."""
    if RUN_CONTAINERS:
        return None
    if operating_system.family is OSFamilies.LINUX:
        data_dir = pathlib.PosixPath.home() / ".local" / "share"
    elif operating_system.family is OSFamilies.MAC:
        data_dir = pathlib.PosixPath.home() / "Library" / "Application Support"
    elif operating_system.family is OSFamilies.WINDOWS:
        data_dir = pathlib.WindowsPath(os.environ["LOCALAPPDATA"])
    else:
        return None
    data_dir /= "o3tanks"
    return data_dir
# --- CONSTANTS ---

DEVELOPMENT_MODE = init_from_env("O3TANKS_DEV_MODE", bool, False)
RUN_CONTAINERS = not init_from_env("O3TANKS_NO_CONTAINERS", bool, False)
DISPLAY_ID = init_from_env("O3TANKS_DISPLAY_ID", int, -1)
GPU_DRIVER_NAME = init_from_env("O3TANKS_GPU", GPUDrivers, None)

# NOTE: get_os() reads RUN_CONTAINERS, so this call must follow the
# assignment above
OPERATING_SYSTEM = get_os()

# project-relative locations of the o3tanks settings files
PROJECT_EXTRA_PATH = pathlib.PurePath(".o3tanks")
PUBLIC_PROJECT_EXTRA_PATH = PROJECT_EXTRA_PATH / "public"
PRIVATE_PROJECT_EXTRA_PATH = PROJECT_EXTRA_PATH / "private"
PUBLIC_PROJECT_SETTINGS_PATH = PUBLIC_PROJECT_EXTRA_PATH / "settings.json"
PRIVATE_PROJECT_SETTINGS_PATH = PRIVATE_PROJECT_EXTRA_PATH / "settings.json"

# user/group used inside the containers
USER_NAME = "user"
USER_GROUP = USER_NAME

# identity of the invoking host user, forwarded via environment variables
REAL_USER = User(
    init_from_env("O3TANKS_REAL_USER_NAME", str, None),
    init_from_env("O3TANKS_REAL_USER_GROUP", str, None),
    init_from_env("O3TANKS_REAL_USER_UID", int, None),
    init_from_env("O3TANKS_REAL_USER_GID", int, None)
)

ROOT_DIR = init_from_env("O3TANKS_DIR", pathlib.Path, get_default_root_dir())

DATA_DIR = init_from_env("O3TANKS_DATA_DIR", pathlib.Path, get_default_data_dir(OPERATING_SYSTEM))
if DATA_DIR is not None:
    # normalize a relative override to an absolute path
    if not DATA_DIR.is_absolute():
        DATA_DIR = DATA_DIR.resolve()

RECIPES_PATH = pathlib.PurePath("recipes")
SCRIPTS_PATH = RECIPES_PATH / "o3tanks"

# version of o3tanks itself (assembled by get_version_number())
VERSION_MAJOR = 0
VERSION_MINOR = 2
VERSION_PATCH = 0
VERSION_PRE_RELEASE = "wip"

# sub-directories expected inside a gem workspace
WORKSPACE_GEM_DOCUMENTATION_PATH = "docs"
WORKSPACE_GEM_EXAMPLE_PATH = "examples"
WORKSPACE_GEM_SOURCE_PATH = "gem"

# --- VARIABLES ---

# runtime state, populated through the set_* accessor functions
BIN_FILE = None
REAL_BIN_FILE = None
REAL_PROJECT_DIR = None
# --- FUNCTIONS ---

def get_bin_name():
    # executable name shown to the user; falls back to the default name
    global BIN_FILE
    return BIN_FILE.name if BIN_FILE is not None else "o3tanks"


def get_real_bin_file():
    global REAL_BIN_FILE
    return REAL_BIN_FILE


def get_real_project_dir():
    global REAL_PROJECT_DIR
    return REAL_PROJECT_DIR


def set_bin_file(value):
    global BIN_FILE
    BIN_FILE = pathlib.PurePath(value)


def set_real_bin_file(value):
    global REAL_BIN_FILE
    REAL_BIN_FILE = pathlib.PurePath(value)


def set_real_project_dir(value):
    global REAL_PROJECT_DIR
    REAL_PROJECT_DIR = pathlib.PurePath(value)


def get_version_number():
    """Return the semantic version string, e.g. "0.2.0-wip"."""
    version = "{}.{}.{}".format(VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH)
    if VERSION_PRE_RELEASE is not None:
        version += "-{}".format(VERSION_PRE_RELEASE)
    return version
| 23.660221 | 102 | 0.740572 |
2c56bd1b8386034031b38a0d807163986abc7c44 | 507 | py | Python | tests/test_service.py | drewsonne/aws-tag-a-day | 1b85127e81625a2b6df878d339c81910bc395ef8 | [
"Apache-2.0"
] | null | null | null | tests/test_service.py | drewsonne/aws-tag-a-day | 1b85127e81625a2b6df878d339c81910bc395ef8 | [
"Apache-2.0"
] | null | null | null | tests/test_service.py | drewsonne/aws-tag-a-day | 1b85127e81625a2b6df878d339c81910bc395ef8 | [
"Apache-2.0"
] | null | null | null | from unittest import TestCase
from tag_a_day.services.service import Service
class TestService(TestCase):

    def test__build_tag_sets(self):
        """_build_tag_sets should report as missing the expected tags that
        are neither already set on the resource nor previously evaluated."""
        src = Service(None, None, None, None)
        _, missing_tags = src._build_tag_sets(
            expected_tags=['a', 'b', 'c', 'd', 'e'],
            evaluated_tags=['c'],
            tags=[
                {'Key': 'a', 'Value': '1'},
                {'Key': 'b', 'Value': '2'}
            ]
        )
        self.assertEqual(missing_tags, ['d', 'e'])
| 26.684211 | 52 | 0.514793 |
294f9b4c7ec347392ae8f9cb223ea36583df1b5f | 6,715 | py | Python | Tynda-Nizniy_Bestyakh/Tynda-Nizniy_Bestyakh.py | trolleway/train_diagrams | bd5316bd3e2fcb30b62d928301bf42a2db3c867d | [
"Apache-2.0"
] | null | null | null | Tynda-Nizniy_Bestyakh/Tynda-Nizniy_Bestyakh.py | trolleway/train_diagrams | bd5316bd3e2fcb30b62d928301bf42a2db3c867d | [
"Apache-2.0"
] | 1 | 2020-01-07T17:33:51.000Z | 2020-01-07T17:33:51.000Z | Tynda-Nizniy_Bestyakh/Tynda-Nizniy_Bestyakh.py | trolleway/train_diagrams | bd5316bd3e2fcb30b62d928301bf42a2db3c867d | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python3
# -*- coding: utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
import matplotlib.dates as mdates
import dateutil
from matplotlib.ticker import (MultipleLocator, FormatStrFormatter,
AutoMinorLocator)
import datetime
# Template code for generation of a train schedule diagram.
# Copy this file, and type in your times.

# data sample
# parallel per-train lists: traintimes[n][i] is the i-th call time and
# stationcalls[n][i] the matching station key (kilometre position)
traintimes = dict()
stationcalls = dict()
annotates = list()

# Type graph label here
title = u"Тында - Нижний Бестях. График движения поездов на 2020\n"
title = title + "Tynda-Nizniy_Bestyakh. Train diagram for 2020"
svg_filename = 'Tynda-Nizniy_Bestyakh.svg'

# Station names here.
# Keys can be order numbers, or kilometers.
stations = dict()
stations[0] = u"Тында"
stations[227] = u'Нерюнгри'
stations[586] = u'Томмот'
stations[1022] = u'Нижний Бестях'
stations[1056] = u'Якутск'
# You can get kilometers from OpenStreetMap under open license at
# http://brouter.de/brouter-web/#map=10/55.5838/37.2927/standard,HikeBike.HillShading&lonlats=37.581269,55.777273;36.881192,55.700614&profile=rail

# transliterate station names into bilingual Cyrillic/Latin labels
# may be replaced to load a dataset with official latin names
from transliterate import translit, get_available_language_codes
for id in stations:
    stations[id] = stations[id] + "\n" + translit(stations[id], 'ru', reversed=True)

# Timezones
# In most cases, you do not need to use timezones; type times in local time.
# If a line is as long as the Baikal-Amur mainline, type times in standard
# (Moscow) time, and set the timedelta here (e.g. Moscow + 6 hours).
# TIMEZONE is MSK
time_add = datetime.timedelta(hours=0)

# Type train numbers or IDs here.
# Numbers can be numeric or string; they are converted to string later.
trainnumbers = (325, 687, 376, 324, 328, 'river')
trainnumbers = trainnumbers + (375, 325, '687b', 6164, 6408, 6186, 6410, 7464, 6412, 6414)
for id in trainnumbers:
    traintimes[str(id)] = list()
    stationcalls[str(id)] = list()

# Type the train schedule for each train
# Date is optional
# Local time
trainnumber = '325'
traintimes[trainnumber].append('2020-02-01 02:00')
stationcalls[trainnumber].append(0)
traintimes[trainnumber].append('2020-02-01 07:26')
stationcalls[trainnumber].append(227)
# Optionally set annotates
annotates.append({'datetime': '2020-02-01 02:00', 'station': 0, 'text': u'Каждый день\nevery day'})

trainnumber = '687'
traintimes[trainnumber].append('2020-02-01 10:16')
stationcalls[trainnumber].append(0)
traintimes[trainnumber].append('2020-02-01 15:16')
stationcalls[trainnumber].append(227)
annotates.append({'datetime': '2020-02-01 10:16', 'station': 0, 'text': u'По дням\nSome days'})

trainnumber = '376'
traintimes[trainnumber].append('2020-02-01 18:10')
stationcalls[trainnumber].append(0)
traintimes[trainnumber].append('2020-02-01 23:27')
stationcalls[trainnumber].append(227)
annotates.append({'datetime': '2020-02-01 18:10', 'station': 0, 'text': u'По дням\nSome days'})

# nerungru-tommot
trainnumber = '324'
traintimes[trainnumber].append('2020-02-01 09:20')
stationcalls[trainnumber].append(227)
traintimes[trainnumber].append('2020-02-01 17:25')
stationcalls[trainnumber].append(586)
annotates.append({'datetime': '2020-02-01 09:20', 'station': 227, 'text': u'Каждый день\nevery day'})

# tommot-nizniy_bestyakh
trainnumber = '328'
traintimes[trainnumber].append('2020-02-01 19:00')
stationcalls[trainnumber].append(586)
traintimes[trainnumber].append('2020-02-02 05:02')
stationcalls[trainnumber].append(1022)
annotates.append({'datetime': '2020-02-01 19:00', 'station': 586, 'text': u'По дням\nSome days'})

# bus+ship connection to Yakutsk
trainnumber = 'river'
traintimes[trainnumber].append('2020-02-02 05:17')
stationcalls[trainnumber].append(1022)
traintimes[trainnumber].append('2020-02-02 07:15')
stationcalls[trainnumber].append(1056)
annotates.append({'datetime': '2020-02-01 00:15', 'station': 1022, 'text': u'Летом - теплоход до речного вокзала, зимой - автобус до 202 квартала\nbus+ship to Yakutsk river terminal, winter - bus to Yakutsk 202 quarter'})

# down trains (disabled sample data, kept for reference)
'''
trainnumber='375'
traintimes[trainnumber].append('2020-02-01 06:10')
stationcalls[trainnumber].append(227)
traintimes[trainnumber].append('2020-02-01 11:26')
stationcalls[trainnumber].append(0)
# Optionaly set annotates
annotates.append({'datetime':'2020-02-01 11:26','station':0,'text':u'odd days'})
trainnumber='687b'
traintimes[trainnumber].append('2020-02-01 18:01')
stationcalls[trainnumber].append(227)
traintimes[trainnumber].append('2020-02-01 23:05')
stationcalls[trainnumber].append(0)
annotates.append({'datetime':'2020-02-01 23:05','station':0,'text':u'even days'})
trainnumber='325'
traintimes[trainnumber].append('2020-02-01 19:20')
stationcalls[trainnumber].append(0)
traintimes[trainnumber].append('2020-02-02 00:34')
stationcalls[trainnumber].append(227)
annotates.append({'datetime':'2020-02-01 18:10','station':0,'text':u'every day\n в Хабаровск'})
'''
# end of data
# STYLING
# Line style, see reference at https://matplotlib.org/2.0.2/api/lines_api.html
train_line_style = 'g-'
vertical_hour_ticks_interval = 2
horizontal_axis_label_format = '%H'
# bug fix: "02", "01" and "00" are invalid decimal literals in Python 3
# (leading zeros raise a SyntaxError under the file's python3 shebang);
# plotted time window from 2020-02-01 00:00 to 2020-02-02 08:00
x_bounds = [datetime.datetime(2020, 2, 1, 0, 0), datetime.datetime(2020, 2, 2, 8, 0)]
figsize = (9, 9)
# END OF STYLING
def convert_dates(times_list):
    """Parse a list of time strings into datetime objects, shifted by the
    module-level timezone offset *time_add*."""
    n = list()
    for i in times_list:
        temp_dt = dateutil.parser.parse(str(i)) + time_add
        n.append(temp_dt)
    return(n)
# convert every train's time strings into datetime objects
for k in traintimes:
    temp_dict = dict()
    temp_dict = convert_dates(traintimes[k])
    traintimes[k] = temp_dict

fig, ax = plt.subplots(figsize=figsize)

# styling: hour ticks on the horizontal (time) axis
hours = mdates.HourLocator(interval=vertical_hour_ticks_interval)
hours_fmt = mdates.DateFormatter(horizontal_axis_label_format)
plt.title(title)

# station labels: y ticks at each station's kilometre position
station_names = list()
station_pks = list()
for elem in sorted(stations.items()):
    print(elem[0], " ::", elem[1])
    station_names.append(elem[1])
    station_pks.append(elem[0])
plt.yticks(station_pks)
ax.set_yticklabels(station_names)
ax.set_xlim(x_bounds)

# one polyline per train: time on x, station position on y
for trainnumber in traintimes:
    print(trainnumber)
    ax.plot(traintimes[trainnumber], stationcalls[trainnumber], train_line_style, label=trainnumber, color='gray', antialiased=False)

# ax.set_ylabel(r'stations')
ax.xaxis.set_major_locator(hours)
ax.xaxis.set_major_formatter(hours_fmt)
plt.gcf().autofmt_xdate()
ax.grid(True)

# Annotates: arrows with free-text notes at (time, station) points
if len(annotates) > 0:
    for annotate in annotates:
        ax.annotate(annotate['text'], (mdates.date2num(dateutil.parser.parse(str(annotate['datetime']))), annotate['station']), xytext=(15, 15),
            textcoords='offset points', arrowprops=dict(arrowstyle='-|>'))

# plt.legend(title='Trains:')
plt.savefig(svg_filename)
plt.show()
9332cd98f56e9d8802fddde4f6b3be99b353890c | 31,891 | py | Python | localstack/utils/aws/aws_stack.py | payton/localstack | 937bec26583ed854d3abfc6ca7d5c21ca1b4539b | [
"Apache-2.0"
] | null | null | null | localstack/utils/aws/aws_stack.py | payton/localstack | 937bec26583ed854d3abfc6ca7d5c21ca1b4539b | [
"Apache-2.0"
] | null | null | null | localstack/utils/aws/aws_stack.py | payton/localstack | 937bec26583ed854d3abfc6ca7d5c21ca1b4539b | [
"Apache-2.0"
] | null | null | null | import os
import re
import json
import time
import boto3
import base64
import logging
import six
from six.moves.urllib.parse import quote_plus, unquote_plus
from localstack import config
from localstack.constants import (
REGION_LOCAL, LOCALHOST, MOTO_ACCOUNT_ID, ENV_DEV, APPLICATION_AMZ_JSON_1_1,
APPLICATION_AMZ_JSON_1_0, APPLICATION_X_WWW_FORM_URLENCODED, TEST_AWS_ACCOUNT_ID)
from localstack.utils.common import (
run_safe, to_str, is_string, is_string_or_bytes, make_http_request, is_port_open, get_service_protocol)
from localstack.utils.aws.aws_models import KinesisStream
# AWS environment variable names
ENV_ACCESS_KEY = 'AWS_ACCESS_KEY_ID'
ENV_SECRET_KEY = 'AWS_SECRET_ACCESS_KEY'
ENV_SESSION_TOKEN = 'AWS_SESSION_TOKEN'

# set up logger
LOG = logging.getLogger(__name__)

# cache for the region of the default boto3 session (see get_local_region())
LOCAL_REGION = None

# Use this field if you want to provide a custom boto3 session.
# This field takes priority over CREATE_NEW_SESSION_PER_BOTO3_CONNECTION
CUSTOM_BOTO3_SESSION = None

# Use this flag to enable creation of a new session for each boto3 connection.
# This flag will be ignored if CUSTOM_BOTO3_SESSION is specified
CREATE_NEW_SESSION_PER_BOTO3_CONNECTION = False

# Used in AWS assume role function
INITIAL_BOTO3_SESSION = None

# Boto clients cache, keyed by service/env/region/endpoint (see connect_to_service)
BOTO_CLIENTS_CACHE = {}

# Assume role loop seconds
DEFAULT_TIMER_LOOP_SECONDS = 60 * 50

# maps SQS queue ARNs to queue URLs
SQS_ARN_TO_URL_CACHE = {}
class Environment(object):
    """Target environment for AWS connections: a region plus a deployment
    prefix (e.g. 'dev', 'prod')."""

    def __init__(self, region=None, prefix=None):
        # target is the runtime environment to use, e.g.,
        # 'local' for local mode
        self.region = region or get_local_region()
        # prefix can be 'prod', 'stg', 'uat-1', etc.
        self.prefix = prefix

    def apply_json(self, j):
        # accept either a JSON string or an already-parsed dict
        if isinstance(j, str):
            j = json.loads(j)
        self.__dict__.update(j)

    @staticmethod
    def from_string(s):
        # parse '<region>:<prefix>', a predefined name (e.g. 'dev'),
        # or a bare prefix (local region assumed)
        parts = s.split(':')
        if len(parts) == 1:
            if s in PREDEFINED_ENVIRONMENTS:
                return PREDEFINED_ENVIRONMENTS[s]
            parts = [get_local_region(), s]
        if len(parts) > 2:
            raise Exception('Invalid environment string "%s"' % s)
        region = parts[0]
        prefix = parts[1]
        return Environment(region=region, prefix=prefix)

    @staticmethod
    def from_json(j):
        if not isinstance(j, dict):
            j = j.to_dict()
        result = Environment()
        result.apply_json(j)
        return result

    def __str__(self):
        return '%s:%s' % (self.region, self.prefix)
# shorthand environments resolvable by name in Environment.from_string()
PREDEFINED_ENVIRONMENTS = {
    ENV_DEV: Environment(region=REGION_LOCAL, prefix=ENV_DEV)
}


def get_environment(env=None, region_name=None):
    """
    Return an Environment object based on the input arguments.

    Parameter `env` can be either of:
        * None (or empty), in which case the rules below are applied to (env = os.environ['ENV'] or ENV_DEV)
        * an Environment object (then this object is returned)
        * a string '<region>:<name>', which corresponds to Environment(region='<region>', prefix='<prefix>')
        * the predefined string 'dev' (ENV_DEV), which implies Environment(region='local', prefix='dev')
        * a string '<name>', which implies Environment(region=DEFAULT_REGION, prefix='<name>')

    Additionally, parameter `region_name` can be used to override DEFAULT_REGION.
    """
    if not env:
        if 'ENV' in os.environ:
            env = os.environ['ENV']
        else:
            env = ENV_DEV
    elif not is_string(env) and not isinstance(env, Environment):
        raise Exception('Invalid environment: %s' % env)

    if is_string(env):
        env = Environment.from_string(env)
    if region_name:
        # explicit region overrides whatever the environment specified
        env.region = region_name
    if not env.region:
        raise Exception('Invalid region in environment: "%s"' % env)
    return env


def is_local_env(env):
    # 'local'/'dev' environments are served by LocalStack itself
    return not env or env.region == REGION_LOCAL or env.prefix == ENV_DEV
class Boto3Session(boto3.session.Session):
    """ Custom boto3 session that points to local endpoint URLs. """

    def resource(self, service, *args, **kwargs):
        self._fix_endpoint(kwargs)
        return connect_to_resource(service, *args, **kwargs)

    def client(self, service, *args, **kwargs):
        self._fix_endpoint(kwargs)
        return connect_to_service(service, *args, **kwargs)

    def _fix_endpoint(self, kwargs):
        # drop real AWS endpoint URLs so the local endpoint gets used instead
        if 'amazonaws.com' in kwargs.get('endpoint_url', ''):
            kwargs.pop('endpoint_url')
def get_boto3_credentials():
    """Return boto3 credentials, preferring the custom session if configured;
    otherwise a default session is created lazily and cached."""
    global INITIAL_BOTO3_SESSION
    if CUSTOM_BOTO3_SESSION:
        return CUSTOM_BOTO3_SESSION.get_credentials()
    if not INITIAL_BOTO3_SESSION:
        INITIAL_BOTO3_SESSION = boto3.session.Session()
    return INITIAL_BOTO3_SESSION.get_credentials()


def get_boto3_session():
    # precedence: custom session > fresh session per connection > module default
    if CUSTOM_BOTO3_SESSION:
        return CUSTOM_BOTO3_SESSION
    if CREATE_NEW_SESSION_PER_BOTO3_CONNECTION:
        return boto3.session.Session()
    # return default session
    return boto3


def get_region():
    # TODO look up region from context
    return get_local_region()


def get_local_region():
    # cache the region configured for the default boto3 session
    global LOCAL_REGION
    if LOCAL_REGION is None:
        session = boto3.session.Session()
        LOCAL_REGION = session.region_name or ''
    return LOCAL_REGION or config.DEFAULT_REGION
def get_local_service_url(service_name_or_port):
    """ Return the local service URL for the given service name or port. """
    if isinstance(service_name_or_port, int):
        return '%s://%s:%s' % (get_service_protocol(), LOCALHOST, service_name_or_port)
    service_name = service_name_or_port
    # map aliases to their canonical service names
    if service_name == 's3api':
        service_name = 's3'
    elif service_name == 'runtime.sagemaker':
        service_name = 'sagemaker-runtime'
    # URLs are configured via TEST_<SERVICE>_URL environment variables
    service_name_upper = service_name.upper().replace('-', '_').replace('.', '_')
    return os.environ['TEST_%s_URL' % service_name_upper]


def is_service_enabled(service_name):
    """ Return whether the service with the given name (e.g., "lambda") is available. """
    try:
        url = get_local_service_url(service_name)
        assert url
        return is_port_open(url, http_path='/', expect_success=False)
    except Exception:
        # best-effort probe: any failure is treated as "not available"
        return False
def connect_to_resource(service_name, env=None, region_name=None, endpoint_url=None, *args, **kwargs):
    """
    Generic method to obtain an AWS service resource using boto3, based on environment, region, or custom endpoint_url.
    """
    return connect_to_service(service_name, client=False, env=env, region_name=region_name, endpoint_url=endpoint_url)


def connect_to_service(service_name, client=True, env=None, region_name=None, endpoint_url=None,
        config=None, verify=False, *args, **kwargs):
    """
    Generic method to obtain an AWS service client using boto3, based on environment, region, or custom endpoint_url.

    Note: the *config* parameter (a botocore Config) shadows the module-level
    `config` import within this function body.
    """
    region_name = region_name or get_region()
    env = get_environment(env, region_name=region_name)
    region = env.region if env.region != REGION_LOCAL else region_name
    key_elements = [service_name, client, env, region, endpoint_url, config]
    cache_key = '/'.join([str(k) for k in key_elements])
    if cache_key not in BOTO_CLIENTS_CACHE:
        # Cache clients, as this is a relatively expensive operation
        my_session = get_boto3_session()
        method = my_session.client if client else my_session.resource
        if not endpoint_url:
            if is_local_env(env):
                # local environments always target the LocalStack endpoint
                endpoint_url = get_local_service_url(service_name)
                verify = False
        BOTO_CLIENTS_CACHE[cache_key] = method(service_name, region_name=region,
            endpoint_url=endpoint_url, verify=verify, config=config)
    return BOTO_CLIENTS_CACHE[cache_key]
class VelocityInput:
    """Simple class to mimick the behavior of variable '$input' in AWS API Gateway integration velocity templates.
    See: http://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-mapping-template-reference.html"""

    def __init__(self, value):
        self.value = value

    def path(self, path):
        # evaluate a JSONPath expression against the (possibly serialized) payload
        from jsonpath_rw import parse
        value = self.value if isinstance(self.value, dict) else json.loads(self.value)
        jsonpath_expr = parse(path)
        result = [match.value for match in jsonpath_expr.find(value)]
        # single matches are unwrapped instead of returned as a 1-element list
        result = result[0] if len(result) == 1 else result
        return result

    def json(self, path):
        return json.dumps(self.path(path))

    def __repr__(self):
        return '$input'


class VelocityUtil:
    """Simple class to mimick the behavior of variable '$util' in AWS API Gateway integration velocity templates.
    See: http://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-mapping-template-reference.html"""

    def base64Encode(self, s):
        # non-string inputs are JSON-serialized before encoding
        if not isinstance(s, str):
            s = json.dumps(s)
        encoded_str = s.encode(config.DEFAULT_ENCODING)
        encoded_b64_str = base64.b64encode(encoded_str)
        return encoded_b64_str.decode(config.DEFAULT_ENCODING)

    def base64Decode(self, s):
        if not isinstance(s, str):
            s = json.dumps(s)
        return base64.b64decode(s)

    def toJson(self, obj):
        # falsy inputs (None, '', 0, ...) are returned unchanged
        return obj and json.dumps(obj)

    def urlEncode(self, s):
        return quote_plus(s)

    def urlDecode(self, s):
        return unquote_plus(s)

    def escapeJavaScript(self, s):
        return str(s).replace("'", r"\'")
def render_velocity_template(template, context, variables=None, as_json=False):
    """Render an API Gateway velocity *template* against the request payload.

    :param template: velocity template string
    :param context: payload exposed to the template as ``$input``
    :param variables: optional extra variables merged into the namespace
    :param as_json: if True, parse the rendered output as JSON
    :return: the rendered string (or parsed object when as_json=True)
    """
    import airspeed

    # Normalize '#  set' to '#set' so airspeed recognizes the directive.
    # Bug fix: re.MULTILINE was previously passed as the positional *count*
    # argument of re.sub (limiting replacements to 8 and never acting as a
    # flag); the (^|\n) alternation already anchors at line starts, so no
    # flag is needed and all occurrences are now handled.
    template = re.sub(r'(^|\n)#\s+set(.*)', r'\1#set\2', template)

    t = airspeed.Template(template)
    var_map = {
        'input': VelocityInput(context),
        'util': VelocityUtil()
    }
    var_map.update(variables or {})
    replaced = t.merge(var_map)
    if as_json:
        replaced = json.loads(replaced)
    return replaced
def check_valid_region(headers):
    """ Check whether a valid region is provided, and if not then raise an Exception. """
    auth_header = headers.get('Authorization')
    if not auth_header:
        raise Exception('Unable to find "Authorization" header in request')
    replaced = re.sub(r'.*Credential=([^,]+),.*', r'\1', auth_header)
    if auth_header == replaced:
        # no substitution happened -> the "Credential=" section is absent
        raise Exception('Unable to find "Credential" section in "Authorization" header')
    # Format is: <your-access-key-id>/<date>/<aws-region>/<aws-service>/aws4_request
    # See https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-auth-using-authorization-header.html
    parts = replaced.split('/')
    region = parts[2]
    if region not in config.VALID_REGIONS:
        raise Exception('Invalid region specified in "Authorization" header: "%s"' % region)


def set_default_region_in_headers(headers):
    # rewrite the region inside the SigV4 credential scope to the default region
    auth_header = headers.get('Authorization')
    if not auth_header:
        return
    replaced = re.sub(r'(.*Credential=[^/]+/[^/]+/)([^/])+/', r'\1%s/' % get_region(), auth_header)
    headers['Authorization'] = replaced
def fix_account_id_in_arns(response, colon_delimiter=':', existing=None, replace=None):
    """ Fix the account ID in the ARNs returned in the given Flask response or string """
    # account IDs to rewrite (defaults include the fixed moto account ID)
    existing = existing or ['123456789', '1234567890', MOTO_ACCOUNT_ID]
    existing = existing if isinstance(existing, list) else [existing]
    replace = replace or TEST_AWS_ACCOUNT_ID
    is_str_obj = is_string_or_bytes(response)
    content = to_str(response if is_str_obj else response._content)

    replace = r'arn{col}aws{col}\1{col}\2{col}{acc}{col}'.format(col=colon_delimiter, acc=replace)
    for acc_id in existing:
        # match 'arn:aws:<service>:<region>:<acc_id>:' (delimiter may differ,
        # e.g. for URL-encoded payloads)
        regex = r'arn{col}aws{col}([^:%]+){col}([^:%]*){col}{acc}{col}'.format(col=colon_delimiter, acc=acc_id)
        content = re.sub(regex, replace, content)

    if not is_str_obj:
        # write the patched body back and keep content-length in sync
        response._content = content
        response.headers['content-length'] = len(response._content)
        return response
    return content
def get_s3_client():
    # path-style addressing is required for the local S3 endpoint
    return boto3.resource('s3',
        endpoint_url=config.TEST_S3_URL,
        config=boto3.session.Config(s3={'addressing_style': 'path'}),
        verify=False)


def sqs_queue_url_for_arn(queue_arn):
    """Resolve an SQS queue ARN to its queue URL (results are cached)."""
    if '://' in queue_arn:
        # already a URL
        return queue_arn
    if queue_arn in SQS_ARN_TO_URL_CACHE:
        return SQS_ARN_TO_URL_CACHE[queue_arn]
    sqs_client = connect_to_service('sqs')
    parts = queue_arn.split(':')
    # ARN layout: arn:aws:sqs:<region>:<account>:<name>
    result = sqs_client.get_queue_url(QueueName=parts[5], QueueOwnerAWSAccountId=parts[4])['QueueUrl']
    SQS_ARN_TO_URL_CACHE[queue_arn] = result
    return result


def extract_region_from_auth_header(headers):
    # pull the region out of the SigV4 credential scope; fall back to default
    auth = headers.get('Authorization') or ''
    region = re.sub(r'.*Credential=[^/]+/[^/]+/([^/]+)/.*', r'\1', auth)
    region = region or get_region()
    return region
def extract_region_from_arn(arn):
    """Return the region component of an ARN (``arn:aws:<service>:<region>:...``),
    or None when the string has no region part.

    Bug fix: the previous guard (``len(parts) > 1``) still indexed ``parts[3]``
    and raised IndexError for malformed inputs with 2-4 colon-separated parts.
    """
    parts = arn.split(':')
    return parts[3] if len(parts) > 3 else None
def get_account_id(account_id=None, env=None):
    """Return the configured AWS account ID (supported for local envs only)."""
    if account_id:
        return account_id
    env = get_environment(env)
    if is_local_env(env):
        return os.environ['TEST_AWS_ACCOUNT_ID']
    raise Exception('Unable to determine AWS account ID (%s, %s)' % (account_id, env))


def role_arn(role_name, account_id=None, env=None):
    # build an IAM role ARN; empty values and full ARNs pass through unchanged
    if not role_name:
        return role_name
    if role_name.startswith('arn:aws:iam::'):
        return role_name
    env = get_environment(env)
    account_id = get_account_id(account_id, env=env)
    return 'arn:aws:iam::%s:role/%s' % (account_id, role_name)


def iam_resource_arn(resource, role=None, env=None):
    # derive a role ARN for *resource*, using the naming convention below
    env = get_environment(env)
    if not role:
        role = get_iam_role(resource, env=env)
    return role_arn(role_name=role, account_id=get_account_id())


def get_iam_role(resource, env=None):
    # naming convention for auto-created per-resource roles
    env = get_environment(env)
    return 'role-%s' % resource
# --- per-service ARN construction helpers (all delegate to _resource_arn) ---

def secretsmanager_secret_arn(secret_name, account_id=None, region_name=None):
    pattern = 'arn:aws:secretsmanager:%s:%s:secret:%s'
    return _resource_arn(secret_name, pattern, account_id=account_id, region_name=region_name)


def cloudformation_stack_arn(stack_name, account_id=None, region_name=None):
    # note: the stack ID suffix is a fixed placeholder
    pattern = 'arn:aws:cloudformation:%s:%s:stack/%s/id-1234'
    return _resource_arn(stack_name, pattern, account_id=account_id, region_name=region_name)


def dynamodb_table_arn(table_name, account_id=None, region_name=None):
    pattern = 'arn:aws:dynamodb:%s:%s:table/%s'
    return _resource_arn(table_name, pattern, account_id=account_id, region_name=region_name)


def dynamodb_stream_arn(table_name, latest_stream_label, account_id=None):
    account_id = get_account_id(account_id)
    return ('arn:aws:dynamodb:%s:%s:table/%s/stream/%s' %
        (get_region(), account_id, table_name, latest_stream_label))


def log_group_arn(group_name, account_id=None, region_name=None):
    pattern = 'arn:aws:logs:%s:%s:log-group:%s'
    return _resource_arn(group_name, pattern, account_id=account_id, region_name=region_name)


def events_rule_arn(rule_name, account_id=None, region_name=None):
    pattern = 'arn:aws:events:%s:%s:rule/%s'
    return _resource_arn(rule_name, pattern, account_id=account_id, region_name=region_name)


def lambda_function_arn(function_name, account_id=None, region_name=None):
    return lambda_function_or_layer_arn('function', function_name, account_id=account_id, region_name=region_name)
def lambda_layer_arn(layer_name, version=None, account_id=None):
    """Return the ARN of a Lambda layer, with an optional version suffix.

    Bug fix: the *version* argument was previously discarded (the call always
    passed ``version=None``), so versioned layer ARNs were never produced.
    """
    return lambda_function_or_layer_arn('layer', layer_name, version=version, account_id=account_id)
def lambda_function_or_layer_arn(type, entity_name, version=None, account_id=None, region_name=None):
    """Build (or pass through) the ARN of a Lambda function or layer.

    :param type: 'function' or 'layer'
    :param entity_name: plain name, or an already complete ARN
    :param version: optional version/qualifier appended as an ARN suffix
    """
    pattern = 'arn:aws:lambda:.*:.*:(function|layer):.*'
    if re.match(pattern, entity_name):
        # already a full ARN
        return entity_name
    if ':' in entity_name:
        raise Exception('Lambda %s name should not contain a colon ":": %s' % (type, entity_name))
    account_id = get_account_id(account_id)
    region_name = region_name or get_region()
    # turn the match pattern into a format template for the requested type
    pattern = re.sub(r'\([^\|]+\|.+\)', type, pattern)
    result = pattern.replace('.*', '%s') % (region_name, account_id, entity_name)
    if version:
        result = '%s:%s' % (result, version)
    return result
def lambda_function_name(name_or_arn):
    """Extract the function name from a Lambda function ARN; plain names
    (no colon) are returned unchanged."""
    if ':' not in name_or_arn:
        return name_or_arn
    # name is index #6 in pattern: arn:aws:lambda:<region>:<account>:function:<name>
    return name_or_arn.split(':')[6]
def state_machine_arn(name, account_id=None, region_name=None):
    pattern = 'arn:aws:states:%s:%s:stateMachine:%s'
    return _resource_arn(name, pattern, account_id=account_id, region_name=region_name)


def stepfunctions_activity_arn(name, account_id=None, region_name=None):
    pattern = 'arn:aws:states:%s:%s:activity:%s'
    return _resource_arn(name, pattern, account_id=account_id, region_name=region_name)


def fix_arn(arn):
    """ Function that attempts to "canonicalize" the given ARN. This includes converting
    resource names to ARNs, replacing incorrect regions, account IDs, etc. """
    if arn.startswith('arn:aws:lambda'):
        parts = arn.split(':')
        # fall back to the current default region for unknown region values
        region = parts[3] if parts[3] in config.VALID_REGIONS else get_region()
        return lambda_function_arn(lambda_function_name(arn), region_name=region)
    # only Lambda ARNs are handled so far; everything else passes through
    LOG.warning('Unable to fix/canonicalize ARN: %s' % arn)
    return arn


def cognito_user_pool_arn(user_pool_id, account_id=None, region_name=None):
    pattern = 'arn:aws:cognito-idp:%s:%s:userpool/%s'
    return _resource_arn(user_pool_id, pattern, account_id=account_id, region_name=region_name)


def kinesis_stream_arn(stream_name, account_id=None, region_name=None):
    pattern = 'arn:aws:kinesis:%s:%s:stream/%s'
    return _resource_arn(stream_name, pattern, account_id=account_id, region_name=region_name)


def firehose_stream_arn(stream_name, account_id=None, region_name=None):
    pattern = 'arn:aws:firehose:%s:%s:deliverystream/%s'
    return _resource_arn(stream_name, pattern, account_id=account_id, region_name=region_name)


def es_domain_arn(domain_name, account_id=None, region_name=None):
    pattern = 'arn:aws:es:%s:%s:domain/%s'
    return _resource_arn(domain_name, pattern, account_id=account_id, region_name=region_name)
def s3_bucket_arn(bucket_name, account_id=None):
    """Return the ARN of an S3 bucket (bucket ARNs carry no region/account;
    *account_id* is accepted only for signature symmetry with the other
    ARN helpers)."""
    return 'arn:aws:s3:::{}'.format(bucket_name)
def _resource_arn(name, pattern, account_id=None, region_name=None):
    # generic ARN builder: fills region/account/name into *pattern*;
    # names already containing ':' are assumed to be complete ARNs
    if ':' in name:
        return name
    account_id = get_account_id(account_id)
    region_name = region_name or get_region()
    return pattern % (region_name, account_id, name)


def create_sqs_queue(queue_name, env=None):
    """Create (or return) the SQS queue with the given name."""
    env = get_environment(env)
    # queue
    conn = connect_to_service('sqs', env=env)
    return conn.create_queue(QueueName=queue_name)


def sqs_queue_arn(queue_name, account_id=None, region_name=None):
    account_id = get_account_id(account_id)
    region_name = region_name or get_region()
    return ('arn:aws:sqs:%s:%s:%s' % (region_name, account_id, queue_name))


def apigateway_restapi_arn(api_id, account_id=None, region_name=None):
    account_id = get_account_id(account_id)
    region_name = region_name or get_region()
    return ('arn:aws:apigateway:%s:%s:/restapis/%s' % (region_name, account_id, api_id))
def sqs_queue_name(queue_arn):
    """Return the queue name from an SQS queue ARN; plain queue names
    (no colons) are returned unchanged."""
    parts = queue_arn.split(':')
    if len(parts) == 1:
        return queue_arn
    # ARN layout: arn:aws:sqs:<region>:<account>:<name>
    return parts[5]
def sns_topic_arn(topic_name, account_id=None):
    account_id = get_account_id(account_id)
    return ('arn:aws:sns:%s:%s:%s' % (get_region(), account_id, topic_name))


def get_sqs_queue_url(queue_arn):
    """Look up the queue URL via the SQS API, in the region encoded in the ARN."""
    region_name = extract_region_from_arn(queue_arn)
    queue_name = sqs_queue_name(queue_arn)
    client = connect_to_service('sqs', region_name=region_name)
    response = client.get_queue_url(QueueName=queue_name)
    return response['QueueUrl']


def sqs_receive_message(queue_arn):
    """Receive one batch of messages from the queue identified by *queue_arn*."""
    region_name = extract_region_from_arn(queue_arn)
    client = connect_to_service('sqs', region_name=region_name)
    queue_url = get_sqs_queue_url(queue_arn)
    response = client.receive_message(QueueUrl=queue_url)
    return response
def firehose_name(firehose_arn):
    """Return the delivery stream name, i.e. everything after the last '/'
    (the input itself when it contains no slash)."""
    return firehose_arn.rsplit('/', 1)[-1]
def mock_aws_request_headers(service='dynamodb', region_name=None):
    """Build mock request headers (including a dummy SigV4 Authorization
    header carrying the access key, region and service) for raw HTTP calls
    against local AWS endpoints."""
    # content type varies by target service protocol
    ctype = APPLICATION_AMZ_JSON_1_0
    if service == 'kinesis':
        ctype = APPLICATION_AMZ_JSON_1_1
    elif service in ['sns', 'sqs']:
        ctype = APPLICATION_X_WWW_FORM_URLENCODED
    access_key = get_boto3_credentials().access_key
    region_name = region_name or get_region()
    headers = {
        'Content-Type': ctype,
        'Accept-Encoding': 'identity',
        'X-Amz-Date': '20160623T103251Z',
        'Authorization': ('AWS4-HMAC-SHA256 ' +
            'Credential=%s/20160623/%s/%s/aws4_request, ' +
            'SignedHeaders=content-type;host;x-amz-date;x-amz-target, Signature=1234') % (
            access_key, region_name, service)
    }
    return headers
def dynamodb_get_item_raw(request):
    """Perform a raw HTTP GetItem call against the local DynamoDB endpoint.

    :param request: GetItem request payload (dict), serialized as JSON
    :return: parsed response body, or None for an empty response
    """
    headers = mock_aws_request_headers()
    headers['X-Amz-Target'] = 'DynamoDB_20120810.GetItem'
    new_item = make_http_request(url=config.TEST_DYNAMODB_URL,
        method='POST', data=json.dumps(request), headers=headers)
    new_item = new_item.text
    # empty body yields None instead of a JSON decode error
    new_item = new_item and json.loads(new_item)
    return new_item
def create_dynamodb_table(table_name, partition_key, env=None, stream_view_type=None):
    """Utility method to create a DynamoDB table.

    :param table_name: name of the table to create
    :param partition_key: attribute name used as the HASH key (string type)
    :param env: optional Environment selecting the target endpoint
    :param stream_view_type: if set, enable streams with this view type
    :return: the newly created table, or the existing table resource when the
        table already exists
    :raises Exception: any creation failure other than "table already exists"
        (previously such errors were silently swallowed and None returned)
    """
    dynamodb = connect_to_service('dynamodb', env=env, client=True)
    stream_spec = {'StreamEnabled': False}
    key_schema = [{
        'AttributeName': partition_key,
        'KeyType': 'HASH'
    }]
    attr_defs = [{
        'AttributeName': partition_key,
        'AttributeType': 'S'
    }]
    if stream_view_type is not None:
        stream_spec = {
            'StreamEnabled': True,
            'StreamViewType': stream_view_type
        }
    try:
        table = dynamodb.create_table(TableName=table_name, KeySchema=key_schema,
            AttributeDefinitions=attr_defs, ProvisionedThroughput={
                'ReadCapacityUnits': 10, 'WriteCapacityUnits': 10
            },
            StreamSpecification=stream_spec
        )
    except Exception as e:
        if 'ResourceInUseException' in str(e):
            # Table already exists -> return table reference
            return connect_to_resource('dynamodb', env=env).Table(table_name)
        # surface unexpected failures instead of returning None
        raise
    # brief pause to let the freshly created table become usable
    time.sleep(2)
    return table
def get_apigateway_integration(api_id, method, path, env=None):
    """Look up the integration configured for the given API path and HTTP method."""
    apigateway = connect_to_service(service_name='apigateway', client=True, env=env)
    resources = apigateway.get_resources(restApiId=api_id, limit=100)
    # collect all resources whose path matches; the last match wins (as before)
    matching_ids = [item['id'] for item in resources['items'] if item['path'] == path]
    resource_id = matching_ids[-1] if matching_ids else None
    if not resource_id:
        raise Exception('Unable to find apigateway integration for path "%s"' % path)
    return apigateway.get_integration(
        restApiId=api_id, resourceId=resource_id, httpMethod=method
    )
def get_apigateway_resource_for_path(api_id, path, parent=None, resources=None):
    """Find the API Gateway resource matching a slash-separated path.

    Walks `path` one segment at a time, matching each segment's `pathPart`
    against a resource whose parent is the previously matched resource.

    :param api_id: REST API id (used only when `resources` must be fetched)
    :param path: path string ('a/b/c') or list of remaining segments
    :param parent: resource matched for the previous segment (internal)
    :param resources: optional pre-fetched list of resource dicts
    :return: the matching resource dict, or None if no resource matches
    """
    if resources is None:
        apigateway = connect_to_service(service_name='apigateway')
        # BUG FIX: get_resources() returns a response dict whose resource list
        # is under 'items' (iterating the raw dict would yield string keys and
        # break the ['pathPart'] lookups below; see the sibling helper).
        resources = apigateway.get_resources(restApiId=api_id, limit=100)['items']
    if not isinstance(path, list):
        path = path.split('/')
    if not path:
        # all segments matched -> the last matched resource is the result
        return parent
    for resource in resources:
        if resource['pathPart'] == path[0] and (not parent or parent['id'] == resource['parentId']):
            return get_apigateway_resource_for_path(api_id, path[1:], parent=resource, resources=resources)
    return None
def get_apigateway_path_for_resource(api_id, resource_id, path_suffix='', resources=None, region_name=None):
    """Reconstruct the full request path of a resource by walking up its parent chain."""
    if resources is None:
        apigateway = connect_to_service(service_name='apigateway', region_name=region_name)
        resources = apigateway.get_resources(restApiId=api_id, limit=100)['items']
    target_resource = [res for res in resources if res['id'] == resource_id][0]
    path_part = target_resource.get('pathPart', '')
    # prepend this resource's path part to the suffix accumulated so far
    if path_part and path_suffix:
        path_suffix = '%s/%s' % (path_part, path_suffix)
    elif not path_suffix:
        path_suffix = path_part
    parent_id = target_resource.get('parentId')
    if not parent_id:
        # reached the root resource -> emit the absolute path
        return '/%s' % path_suffix
    return get_apigateway_path_for_resource(api_id, parent_id,
        path_suffix=path_suffix, resources=resources, region_name=region_name)
def create_api_gateway(name, description=None, resources=None, stage_name=None,
        enabled_api_keys=None, env=None, usage_plan_name=None, region_name=None):
    """Create and deploy an API Gateway REST API with the given resources.

    :param name: name of the REST API
    :param description: optional description (defaults to a test description)
    :param resources: dict mapping slash-separated paths to lists of method
        definitions (each with 'httpMethod', optional 'authorizationType',
        'apiKeyRequired', and 'integrations')
    :param stage_name: deployment stage name (defaults to 'testing')
    :param enabled_api_keys: reserved for API key setup; currently unused here
    :param env: optional environment forwarded to the service client
    :param usage_plan_name: usage plan name (defaults to 'Basic Usage')
    :param region_name: optional region forwarded to the service client
    :return: the REST API dict as returned by `create_rest_api`
    """
    # BUG FIX: `enabled_api_keys` previously defaulted to a mutable list ([]),
    # which Python evaluates once and shares across all calls.
    enabled_api_keys = enabled_api_keys or []
    client = connect_to_service('apigateway', env=env, region_name=region_name)
    if not resources:
        resources = []
    if not stage_name:
        stage_name = 'testing'
    if not usage_plan_name:
        usage_plan_name = 'Basic Usage'
    if not description:
        description = 'Test description for API "%s"' % name
    LOG.info('Creating API resources under API Gateway "%s".' % name)
    api = client.create_rest_api(name=name, description=description)
    # list resources
    api_id = api['id']
    resources_list = client.get_resources(restApiId=api_id)
    root_res_id = resources_list['items'][0]['id']
    # add API resources and methods
    for path, methods in six.iteritems(resources):
        # create resources recursively, one segment at a time
        parent_id = root_res_id
        for path_part in path.split('/'):
            api_resource = client.create_resource(restApiId=api_id, parentId=parent_id, pathPart=path_part)
            parent_id = api_resource['id']
        # add methods to the (leaf) API resource
        for method in methods:
            client.put_method(
                restApiId=api_id,
                resourceId=api_resource['id'],
                httpMethod=method['httpMethod'],
                authorizationType=method.get('authorizationType') or 'NONE',
                apiKeyRequired=method.get('apiKeyRequired') or False
            )
            # create integrations for this API resource/method
            integrations = method['integrations']
            create_api_gateway_integrations(api_id, api_resource['id'], method,
                integrations, env=env, region_name=region_name)
    # deploy the API gateway
    client.create_deployment(restApiId=api_id, stageName=stage_name)
    return api
def create_api_gateway_integrations(api_id, resource_id, method,
        integrations=None, env=None, region_name=None):
    """Create integrations plus 2xx/4xx/5xx responses for one resource/method.

    :param api_id: REST API id
    :param resource_id: id of the API resource to attach integrations to
    :param method: method definition dict (must contain 'httpMethod')
    :param integrations: list of integration dicts with 'type', 'uri' and
        optional request/response templates and status codes
    :param env: optional environment forwarded to the service client
    :param region_name: optional region forwarded to the service client
    """
    # BUG FIX: `integrations` previously defaulted to a mutable list ([]),
    # which Python evaluates once and shares across all calls.
    integrations = integrations or []
    client = connect_to_service('apigateway', env=env, region_name=region_name)
    for integration in integrations:
        req_templates = integration.get('requestTemplates') or {}
        res_templates = integration.get('responseTemplates') or {}
        success_code = integration.get('successCode') or '200'
        client_error_code = integration.get('clientErrorCode') or '400'
        server_error_code = integration.get('serverErrorCode') or '500'
        # create integration
        client.put_integration(
            restApiId=api_id,
            resourceId=resource_id,
            httpMethod=method['httpMethod'],
            integrationHttpMethod=method.get('integrationHttpMethod') or method['httpMethod'],
            type=integration['type'],
            uri=integration['uri'],
            requestTemplates=req_templates
        )
        # map backend status patterns to the configured response codes
        response_configs = [
            {'pattern': '^2.*', 'code': success_code, 'res_templates': res_templates},
            {'pattern': '^4.*', 'code': client_error_code, 'res_templates': {}},
            {'pattern': '^5.*', 'code': server_error_code, 'res_templates': {}}
        ]
        # create response configs
        for response_config in response_configs:
            # create integration response
            client.put_integration_response(
                restApiId=api_id,
                resourceId=resource_id,
                httpMethod=method['httpMethod'],
                statusCode=response_config['code'],
                responseTemplates=response_config['res_templates'],
                selectionPattern=response_config['pattern']
            )
            # create method response
            client.put_method_response(
                restApiId=api_id,
                resourceId=resource_id,
                httpMethod=method['httpMethod'],
                statusCode=response_config['code']
            )
def apigateway_invocations_arn(lambda_uri):
    """Return the API Gateway invocation ARN for the given Lambda function URI."""
    region = get_region()
    return ('arn:aws:apigateway:%s:lambda:path/2015-03-31/functions/%s/invocations' %
        (region, lambda_uri))
def get_elasticsearch_endpoint(domain=None, region_name=None):
    """Resolve the Elasticsearch endpoint URL (local test URL or the domain's endpoint)."""
    env = get_environment(region_name=region_name)
    if is_local_env(env):
        return os.environ['TEST_ELASTICSEARCH_URL']
    # non-local: look up the endpoint of the given domain via the ES API
    es_client = connect_to_service(service_name='es', region_name=env.region)
    info = es_client.describe_elasticsearch_domain(DomainName=domain)
    return 'https://%s' % info['DomainStatus']['Endpoint']
def connect_elasticsearch(endpoint=None, domain=None, region_name=None, env=None):
    """Return an Elasticsearch client for the given endpoint (or domain).

    Resolves the endpoint from (in order): the `endpoint` argument, the local
    test URL, or the domain's published endpoint. Uses SigV4 auth when AWS
    credentials are configured via a custom boto3 session or environment vars.

    :raises ValueError: if no endpoint can be determined
    """
    from elasticsearch import Elasticsearch, RequestsHttpConnection
    from requests_aws4auth import AWS4Auth
    env = get_environment(env, region_name=region_name)
    verify_certs = False
    use_ssl = False
    if not endpoint and is_local_env(env):
        endpoint = os.environ['TEST_ELASTICSEARCH_URL']
    if not endpoint and not is_local_env(env) and domain:
        endpoint = get_elasticsearch_endpoint(domain=domain, region_name=env.region)
    if not endpoint:
        # BUG FIX: previously fell through to `'https://' in endpoint` below,
        # raising a confusing TypeError when endpoint was still None.
        raise ValueError('Unable to determine Elasticsearch endpoint (no endpoint or domain given)')
    # use ssl?
    if 'https://' in endpoint:
        use_ssl = True
        # only verify certificates against real (non-local) endpoints
        if not is_local_env(env):
            verify_certs = True
    if CUSTOM_BOTO3_SESSION or (ENV_ACCESS_KEY in os.environ and ENV_SECRET_KEY in os.environ):
        access_key = os.environ.get(ENV_ACCESS_KEY)
        secret_key = os.environ.get(ENV_SECRET_KEY)
        session_token = os.environ.get(ENV_SESSION_TOKEN)
        if CUSTOM_BOTO3_SESSION:
            # a custom session takes precedence over environment credentials
            credentials = CUSTOM_BOTO3_SESSION.get_credentials()
            access_key = credentials.access_key
            secret_key = credentials.secret_key
            session_token = credentials.token
        awsauth = AWS4Auth(access_key, secret_key, env.region, 'es', session_token=session_token)
        connection_class = RequestsHttpConnection
        return Elasticsearch(hosts=[endpoint], verify_certs=verify_certs, use_ssl=use_ssl,
            connection_class=connection_class, http_auth=awsauth)
    return Elasticsearch(hosts=[endpoint], verify_certs=verify_certs, use_ssl=use_ssl)
def create_kinesis_stream(stream_name, shards=1, env=None, delete=False):
    """Create a Kinesis stream and block until it is ready.

    :param stream_name: name of the stream to create
    :param shards: number of shards for the new stream
    :param env: optional environment forwarded to the service client
    :param delete: if True, best-effort delete any pre-existing stream first
    :return: the created KinesisStream wrapper object
    """
    env = get_environment(env)
    # stream
    stream = KinesisStream(id=stream_name, num_shards=shards)
    conn = connect_to_service('kinesis', env=env)
    stream.connect(conn)
    if delete:
        # errors from deleting a non-existent stream are intentionally ignored
        run_safe(lambda: stream.destroy(), print_error=False)
    stream.create()
    stream.wait_for()
    return stream
def kinesis_get_latest_records(stream_name, shard_id, count=10, env=None):
    """Return up to `count` of the most recent records from a Kinesis shard.

    Reads the shard from TRIM_HORIZON until an empty batch is returned, then
    keeps only the trailing `count` records. Record payloads are decoded to
    strings where possible (binary payloads are left untouched).

    :param stream_name: name of the Kinesis stream
    :param shard_id: id of the shard to read
    :param count: maximum number of (most recent) records to return
    :param env: optional environment forwarded to the service client
    :return: list of record dicts (at most `count` entries)
    """
    kinesis = connect_to_service('kinesis', env=env)
    result = []
    response = kinesis.get_shard_iterator(StreamName=stream_name, ShardId=shard_id,
        ShardIteratorType='TRIM_HORIZON')
    shard_iterator = response['ShardIterator']
    while shard_iterator:
        records_response = kinesis.get_records(ShardIterator=shard_iterator)
        records = records_response['Records']
        for record in records:
            try:
                record['Data'] = to_str(record['Data'])
            except Exception:
                # leave non-decodable (binary) payloads as-is
                pass
        result.extend(records)
        # stop paging once a batch comes back empty
        shard_iterator = records_response['NextShardIterator'] if records else False
    # PERF: trim with a single slice instead of the previous O(n^2)
    # `while len(result) > count: result.pop(0)` loop; count<=0 returns [].
    return result[-count:] if count > 0 else []
| 37.607311 | 119 | 0.690759 |
6150bdcf6b347b14b2907b0d8194743cb5e778cf | 13,955 | py | Python | code/python/QuotesAPIforDigitalPortals/v3/fds/sdk/QuotesAPIforDigitalPortals/model/inline_response20033_data_categories.py | factset/enterprise-sdk | 3fd4d1360756c515c9737a0c9a992c7451d7de7e | [
"Apache-2.0"
] | 6 | 2022-02-07T16:34:18.000Z | 2022-03-30T08:04:57.000Z | code/python/QuotesAPIforDigitalPortals/v3/fds/sdk/QuotesAPIforDigitalPortals/model/inline_response20033_data_categories.py | factset/enterprise-sdk | 3fd4d1360756c515c9737a0c9a992c7451d7de7e | [
"Apache-2.0"
] | 2 | 2022-02-07T05:25:57.000Z | 2022-03-07T14:18:04.000Z | code/python/QuotesAPIforDigitalPortals/v3/fds/sdk/QuotesAPIforDigitalPortals/model/inline_response20033_data_categories.py | factset/enterprise-sdk | 3fd4d1360756c515c9737a0c9a992c7451d7de7e | [
"Apache-2.0"
] | null | null | null | """
Quotes API For Digital Portals
The quotes API combines endpoints for retrieving security end-of-day, delayed, and realtime prices with performance key figures and basic reference data on the security and market level. The API supports over 20 different price types for each quote and comes with basic search endpoints based on security identifiers and instrument names. Market coverage is included in the *Sample Use Cases* section below. The Digital Portal use case is focused on high-performance applications that are * serving millions of end-users, * accessible by client browsers via the internet, * supporting subscriptions for streamed updates out-of-the-box, * typically combining a wide variety of *for Digital Portals*-APIs into a highly use-case specific solution for customers, * integrated into complex infrastructures such as existing frontend frameworks, authentication services. All APIs labelled *for Digital Portals* have been designed for direct use by client web applications and feature extreme low latency: The average response time across all endpoints is 30 ms whereas 99% of all requests are answered in close to under 300ms. See the Time Series API for Digital Portals for direct access to price histories, and the News API for Digital Portals for searching and fetching related news. # noqa: E501
The version of the OpenAPI document: 2
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from fds.sdk.QuotesAPIforDigitalPortals.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from fds.sdk.QuotesAPIforDigitalPortals.exceptions import ApiAttributeError
def lazy_import():
    # Deferred import to break circular dependencies between generated model
    # modules; the imported class is injected into this module's namespace.
    from fds.sdk.QuotesAPIforDigitalPortals.model.inline_response20033_data_parent1 import InlineResponse20033DataParent1
    globals()['InlineResponse20033DataParent1'] = InlineResponse20033DataParent1
class InlineResponse20033DataCategories(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """
    # This model has no enum-restricted and no range/regex-validated fields.
    allowed_values = {
    }
    validations = {
    }
    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501
    _nullable = False
    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        lazy_import()
        return {
            'id': (float,),  # noqa: E501
            'code': (str,),  # noqa: E501
            'name': (str,),  # noqa: E501
            'description': (str,),  # noqa: E501
            'active': (bool,),  # noqa: E501
            'parent': (InlineResponse20033DataParent1,),  # noqa: E501
        }
    @cached_property
    def discriminator():
        return None
    attribute_map = {
        'id': 'id',  # noqa: E501
        'code': 'code',  # noqa: E501
        'name': 'name',  # noqa: E501
        'description': 'description',  # noqa: E501
        'active': 'active',  # noqa: E501
        'parent': 'parent',  # noqa: E501
    }
    read_only_vars = {
    }
    _composed_schemas = {}
    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):  # noqa: E501
        """InlineResponse20033DataCategories - a model defined in OpenAPI
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            id (float): Identifier of the category.. [optional]  # noqa: E501
            code (str): Code commonly used for this category.. [optional]  # noqa: E501
            name (str): Name of the category.. [optional]  # noqa: E501
            description (str): Description of the category.. [optional]  # noqa: E501
            active (bool): Indicates whether this category is active.. [optional]  # noqa: E501
            parent (InlineResponse20033DataParent1): [optional]  # noqa: E501
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])
    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """InlineResponse20033DataCategories - a model defined in OpenAPI
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            id (float): Identifier of the category.. [optional]  # noqa: E501
            code (str): Code commonly used for this category.. [optional]  # noqa: E501
            name (str): Name of the category.. [optional]  # noqa: E501
            description (str): Description of the category.. [optional]  # noqa: E501
            active (bool): Indicates whether this category is active.. [optional]  # noqa: E501
            parent (InlineResponse20033DataParent1): [optional]  # noqa: E501
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # NOTE(review): generated code performs this read-only check only
            # after setattr has already run — upstream generator behavior.
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                     f"class with read only attributes.")
| 49.485816 | 1,302 | 0.598352 |
be0f5e9e924adb858a804241cda57bb58bab0ea1 | 931 | bzl | Python | third_party/llvm/workspace.bzl | ckkuang/tensorflow | ec1920986d94a5a1bc42aa0961938a2d93c290e2 | [
"Apache-2.0"
] | 1 | 2021-02-28T03:43:38.000Z | 2021-02-28T03:43:38.000Z | third_party/llvm/workspace.bzl | ckkuang/tensorflow | ec1920986d94a5a1bc42aa0961938a2d93c290e2 | [
"Apache-2.0"
] | null | null | null | third_party/llvm/workspace.bzl | ckkuang/tensorflow | ec1920986d94a5a1bc42aa0961938a2d93c290e2 | [
"Apache-2.0"
] | null | null | null | """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
    """Imports LLVM."""
    # Pinned llvm/llvm-project commit; the SHA256 must be updated in lockstep
    # with the commit whenever LLVM is bumped.
    LLVM_COMMIT = "628dda08b82fcedcd9e89c9ef7850388e988bf68"
    LLVM_SHA256 = "b48c8c63c17631cc0160e1359c1e977188aa6cf5924cfd4b5664397effe65f30"
    tf_http_archive(
        name = name,
        sha256 = LLVM_SHA256,
        strip_prefix = "llvm-project-" + LLVM_COMMIT,
        urls = [
            # TensorFlow's mirror is tried first; GitHub is the fallback.
            "https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
            "https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
        ],
        link_files = {
            # Overlay Bazel BUILD files onto the fetched LLVM/MLIR sources.
            "//third_party/llvm:llvm.autogenerated.BUILD": "llvm/BUILD",
            "//third_party/mlir:BUILD": "mlir/BUILD",
            "//third_party/mlir:test.BUILD": "mlir/test/BUILD",
        },
    )
| 38.791667 | 149 | 0.646617 |
3961257e533f8360e244e58a4a922b9bb515e57b | 2,027 | py | Python | posthog/models/filters/path_filter.py | brave-care/posthog | 8edd14a16ad936fb241dcf856925e9f2ea87cba4 | [
"MIT"
] | 1 | 2021-07-28T19:44:48.000Z | 2021-07-28T19:44:48.000Z | posthog/models/filters/path_filter.py | brave-care/posthog | 8edd14a16ad936fb241dcf856925e9f2ea87cba4 | [
"MIT"
] | null | null | null | posthog/models/filters/path_filter.py | brave-care/posthog | 8edd14a16ad936fb241dcf856925e9f2ea87cba4 | [
"MIT"
] | null | null | null | from typing import Any, Dict, Optional
from rest_framework.request import Request
from posthog.constants import INSIGHT_PATHS
from posthog.models.filters.base_filter import BaseFilter
from posthog.models.filters.mixins.common import (
BreakdownMixin,
BreakdownTypeMixin,
DateMixin,
EntitiesMixin,
FilterTestAccountsMixin,
InsightMixin,
IntervalMixin,
LimitMixin,
OffsetMixin,
)
from posthog.models.filters.mixins.funnel import FunnelCorrelationMixin, FunnelPersonsStepMixin, FunnelWindowMixin
from posthog.models.filters.mixins.paths import (
ComparatorDerivedMixin,
EndPointMixin,
FunnelPathsMixin,
PathGroupingMixin,
PathLimitsMixin,
PathPersonsMixin,
PathReplacementMixin,
PathStepLimitMixin,
PropTypeDerivedMixin,
StartPointMixin,
TargetEventDerivedMixin,
TargetEventsMixin,
)
from posthog.models.filters.mixins.property import PropertyMixin
from posthog.models.filters.mixins.simplify import SimplifyFilterMixin
class PathFilter(
    StartPointMixin,
    EndPointMixin,
    TargetEventDerivedMixin,
    ComparatorDerivedMixin,
    PropTypeDerivedMixin,
    PropertyMixin,
    IntervalMixin,
    InsightMixin,
    FilterTestAccountsMixin,
    DateMixin,
    BreakdownMixin,
    BreakdownTypeMixin,
    EntitiesMixin,
    PathStepLimitMixin,
    FunnelPathsMixin,
    TargetEventsMixin,
    FunnelWindowMixin,
    FunnelPersonsStepMixin,
    PathGroupingMixin,
    PathReplacementMixin,
    PathPersonsMixin,
    LimitMixin,
    OffsetMixin,
    PathLimitsMixin,
    FunnelCorrelationMixin,  # Typing pain because ColumnOptimizer expects a uniform filter
    SimplifyFilterMixin,
    # TODO: proper fix for EventQuery abstraction
    BaseFilter,
):
    """Filter for Paths insights; always forces `insight` to INSIGHT_PATHS."""
    def __init__(self, data: Optional[Dict[str, Any]] = None, request: Optional[Request] = None, **kwargs) -> None:
        # Pin the insight type so downstream mixins treat this as a Paths query.
        # NOTE(review): this mutates the caller-supplied `data` dict in place —
        # confirm callers do not rely on it remaining unchanged.
        if data:
            data["insight"] = INSIGHT_PATHS
        else:
            data = {"insight": INSIGHT_PATHS}
        super().__init__(data, request, **kwargs)
| 27.767123 | 115 | 0.743957 |
21222c2e2e20cab07427185079c756ec3b53fc82 | 52 | py | Python | signals/signals/apps/entities/__init__.py | gonzaloamadio/django-signals2 | 809d1c0b627f5ec68e7bc4f73fdc7de7e7545706 | [
"MIT"
] | null | null | null | signals/signals/apps/entities/__init__.py | gonzaloamadio/django-signals2 | 809d1c0b627f5ec68e7bc4f73fdc7de7e7545706 | [
"MIT"
] | null | null | null | signals/signals/apps/entities/__init__.py | gonzaloamadio/django-signals2 | 809d1c0b627f5ec68e7bc4f73fdc7de7e7545706 | [
"MIT"
] | null | null | null | #default_app_config ='entities.apps.EntitiesConfig'
| 26 | 51 | 0.846154 |
99351389700510188e50cdcf6eae3482ee642548 | 2,287 | py | Python | data/crawlKoreaRegionalData.py | dentobox/hereitis | 3f0d6ceba48675149f42bea84aebd3960da38927 | [
"MIT"
] | null | null | null | data/crawlKoreaRegionalData.py | dentobox/hereitis | 3f0d6ceba48675149f42bea84aebd3960da38927 | [
"MIT"
] | null | null | null | data/crawlKoreaRegionalData.py | dentobox/hereitis | 3f0d6ceba48675149f42bea84aebd3960da38927 | [
"MIT"
] | null | null | null | import requests
import re
from bs4 import BeautifulSoup
import json
print("#####################################")
print("############ 한국 데이터 #############")
print("######## koreaRegionalData.js #########")
html = requests.get("http://ncov.mohw.go.kr/bdBoardList_Real.do?brdId=1&brdGubun=13&ncvContSeq=&contSeq=&board_id=&gubun=").text
# print(html)
soup = BeautifulSoup(html, 'html.parser')
updated = soup.select('.timetable > .info > span')[0].text # 업데이트날짜
# datas = soup.select('#maplayout > button')
datas = soup.select('.rpsa_detail > div > div')
# print(datas)
# print(datas[0])
# datas = datas[1:]
confirmed_region = [] # 시도별확진자
count = 0
for d in datas:
region = d.find_all('h4', class_='cityname')[0].text # 지역이름
confirmed = int(d.find_all('span', class_='num')[0].text.replace(',', '')) # 확진자수
recovered = int(d.find_all('span', class_='num')[2].text.replace(',', '')) # 격리해제수
deaths = int(d.find_all('span', class_='num')[1].text.replace(',', '')) # 사망자수
confirmed_rate = float(d.find_all('span', class_='num')[3].text.replace('-', '0')) # 십만명당발생율
confirmed_region_rate = ''
if count != 0:
슬라이싱 = d.find_all('p', class_='citytit')[0].text
confirmed_region_rate = float(슬라이싱[:슬라이싱.find('%')]) # 지역별확진자비율
confirmed_region.append({
'지역이름' : region,
'확진자수' : confirmed,
'격리해제수' : recovered,
'사망자수' : deaths,
'십만명당발생율' : confirmed_rate,
'지역별확진자비율' : confirmed_region_rate,
})
print(count)
count += 1
# 삭제된 데이터 확인
# print(f'삭제된 데이터 : {시도별확진자[0]}')
confirmed_region.append({'업데이트날짜': updated})
print(confirmed_region)
with open("./data/koreaRegionalData.js", "w", encoding='UTF-8-sig') as json_file:
json.dump(confirmed_region, json_file, ensure_ascii=False, indent=4)
data = ''
with open("./data/koreaRegionalData.js", "r", encoding='UTF-8-sig') as f:
while True:
line = f.readline()
if not line: break
data += line
data = '//Auto-generated by crawlKoreaRegionalData.py\nvar koreaRegionalData = ' + data + ';'
with open("./data/koreaRegionalData.js", "w", encoding='UTF-8-sig') as f_write:
f_write.write(data)
print("############### 완료!! ###############")
print("#####################################")
| 33.632353 | 128 | 0.591605 |
a4761d4867d99fbcd9b01d73dd7a87b823dd53f2 | 1,809 | py | Python | tests/conftest.py | lnielsen/flask-resources | a8be12765883edb1b1f63b4d289416e892d2cb02 | [
"MIT"
] | null | null | null | tests/conftest.py | lnielsen/flask-resources | a8be12765883edb1b1f63b4d289416e892d2cb02 | [
"MIT"
] | null | null | null | tests/conftest.py | lnielsen/flask-resources | a8be12765883edb1b1f63b4d289416e892d2cb02 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Copyright (C) 2020 CERN.
#
# Flask--Resources is free software; you can redistribute it and/or modify
# it under the terms of the MIT License; see LICENSE file for more details.
"""Pytest configuration.
See https://pytest-invenio.readthedocs.io/ for documentation on which test
fixtures are available.
"""
import pytest
from flask import Flask
from flask_resources.context import resource_requestctx
from flask_resources.resources import CollectionResource, Resource, ResourceConfig
class CustomResourceConfig(ResourceConfig):
    """Custom resource configuration."""
    # URL rules under which the item and list views are registered.
    item_route = "/custom/<id>"
    list_route = "/custom/"
class CustomResource(CollectionResource):
    """Custom resource implementation backed by an in-memory dict."""

    def __init__(self, *args, **kwargs):
        """Constructor."""
        super(CustomResource, self).__init__(
            config=CustomResourceConfig, *args, **kwargs
        )
        self.db = {}

    def search(self):
        """Search."""
        # substring match of ?q= against both ids and contents
        query = resource_requestctx.request_args.get("q", "")
        hits = [
            {"id": item_id, "content": content}
            for item_id, content in self.db.items()
            if query in item_id or query in content
        ]
        return 200, hits

    def create(self, obj):
        """Create."""
        self.db[obj["id"]] = obj["content"]
        return 201, self.db

    def read(self, id):
        """Read."""
        return 200, {"id": id, "content": self.db[id]}
@pytest.fixture(scope="module")
def app():
    """Application factory fixture."""
    app_ = Flask(__name__)
    # Register both the default (bare) resource and the custom collection
    # resource as blueprints on the test application.
    default_bp = Resource().as_blueprint("default_resource")
    app_.register_blueprint(default_bp)
    custom_bp = CustomResource().as_blueprint("custom_resource")
    app_.register_blueprint(custom_bp)
    return app_
| 25.842857 | 82 | 0.648425 |
b977d638744e2604b510a3f447c6723e4b98aec3 | 324 | py | Python | login.py | ksypf/py6_flask | c65816eea236a535e0301c63605ee053ad933fe9 | [
"MIT"
] | null | null | null | login.py | ksypf/py6_flask | c65816eea236a535e0301c63605ee053ad933fe9 | [
"MIT"
] | null | null | null | login.py | ksypf/py6_flask | c65816eea236a535e0301c63605ee053ad933fe9 | [
"MIT"
] | null | null | null | print("zhangsan add login.py")
num=10
print("第二次修改")
print("manage修改代码")
num=20
print("zhangsan第三次修改代码")
num=40
print("manage第二次修改了代码")
num=30
print("manage第三次修改代码添加了新的功能")
print("上线版")
print("manage第四次 修改了代码test")
print("dev分支合并")
print("manage第五次修改了代码test2")
print("dev分支合并")
print("第二次dev合并")
print("hello")
| 10.451613 | 30 | 0.722222 |
9d7e25564bf4d9b3c4329112010d4294a4c55f43 | 1,137 | py | Python | tests/test_store_types.py | Cologler/bytecode2ast-python | 407b261a493e018bc86388040ddfb6fb0e4b96d9 | [
"MIT"
] | null | null | null | tests/test_store_types.py | Cologler/bytecode2ast-python | 407b261a493e018bc86388040ddfb6fb0e4b96d9 | [
"MIT"
] | null | null | null | tests/test_store_types.py | Cologler/bytecode2ast-python | 407b261a493e018bc86388040ddfb6fb0e4b96d9 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Copyright (c) 2019~2999 - Cologler <skyoflw@gmail.com>
# ----------
#
# ----------
from utils import get_instrs_from_b2a, get_instrs
def test_store_none():
    # `a = None` must round-trip bytecode -> AST -> bytecode unchanged.
    def func():
        a = None
    assert get_instrs(func) == get_instrs_from_b2a(func)
def test_store_true():
    # `a = True` must round-trip bytecode -> AST -> bytecode unchanged.
    def func():
        a = True
    assert get_instrs(func) == get_instrs_from_b2a(func)
def test_store_false():
    # `a = False` must round-trip bytecode -> AST -> bytecode unchanged.
    def func():
        a = False
    assert get_instrs(func) == get_instrs_from_b2a(func)
def test_store_pack():
    # Tuple packing; `b`/`c` are deliberately unbound — only the compiled
    # bytecode is compared, the function is never executed.
    def func():
        a = b, c
    assert get_instrs(func) == get_instrs_from_b2a(func)
def test_store_unpack():
    # Iterable unpacking into two targets; bytecode round-trip only.
    def func():
        a, b = c
    assert get_instrs(func) == get_instrs_from_b2a(func)
def test_store_multi_assign():
    # Parallel assignment from a tuple; bytecode round-trip only.
    def func():
        a, b = c, d
    assert get_instrs(func) == get_instrs_from_b2a(func)
def test_store_multi_assign_reverse():
    # Swap idiom (compiled with ROT-style ops); bytecode round-trip only.
    def func():
        x, y = y, x
    assert get_instrs(func) == get_instrs_from_b2a(func)
def test_store_chain_assign():
    # Chained assignment (multiple DUP+STORE targets); bytecode round-trip only.
    def func():
        x = y = z = i = j = k
    assert get_instrs(func) == get_instrs_from_b2a(func)
| 19.947368 | 56 | 0.62445 |
a57f8e5dcbf64422cd3c0120a581f6cca3116936 | 25,506 | py | Python | google/analytics/data_v1alpha/types/analytics_data_api.py | LaudateCorpus1/python-analytics-data | ad51ffd1c461663d7ff055b69166004ea5a4d686 | [
"Apache-2.0"
] | 45 | 2020-09-19T11:36:36.000Z | 2022-03-14T17:17:21.000Z | google/analytics/data_v1alpha/types/analytics_data_api.py | LaudateCorpus1/python-analytics-data | ad51ffd1c461663d7ff055b69166004ea5a4d686 | [
"Apache-2.0"
] | 66 | 2020-09-14T22:03:10.000Z | 2022-03-31T17:18:32.000Z | google/analytics/data_v1alpha/types/analytics_data_api.py | LaudateCorpus1/python-analytics-data | ad51ffd1c461663d7ff055b69166004ea5a4d686 | [
"Apache-2.0"
] | 14 | 2020-09-14T21:57:15.000Z | 2022-03-31T00:36:30.000Z | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.analytics.data_v1alpha.types import data
__protobuf__ = proto.module(
package="google.analytics.data.v1alpha",
manifest={
"Metadata",
"RunReportRequest",
"RunReportResponse",
"RunPivotReportRequest",
"RunPivotReportResponse",
"BatchRunReportsRequest",
"BatchRunReportsResponse",
"BatchRunPivotReportsRequest",
"BatchRunPivotReportsResponse",
"GetMetadataRequest",
"RunRealtimeReportRequest",
"RunRealtimeReportResponse",
},
)
class Metadata(proto.Message):
    r"""The dimensions and metrics currently accepted in reporting
    methods.

    Attributes:
        name (str):
            Resource name of this metadata.
        dimensions (Sequence[google.analytics.data_v1alpha.types.DimensionMetadata]):
            The dimension descriptions.
        metrics (Sequence[google.analytics.data_v1alpha.types.MetricMetadata]):
            The metric descriptions.
    """

    # Field numbers mirror the wire format of the v1alpha proto; note that
    # ``name`` is field 3 even though it is declared first here.
    name = proto.Field(proto.STRING, number=3,)
    dimensions = proto.RepeatedField(
        proto.MESSAGE, number=1, message=data.DimensionMetadata,
    )
    metrics = proto.RepeatedField(proto.MESSAGE, number=2, message=data.MetricMetadata,)
class RunReportRequest(proto.Message):
    r"""The request to generate a report.

    Attributes:
        entity (Entity): Property whose events are tracked; within a batch
            request it must be unspecified or match the batch-level entity.
        dimensions (Sequence[Dimension]): The dimensions requested and
            displayed.
        metrics (Sequence[Metric]): The metrics requested and displayed.
        date_ranges (Sequence[DateRange]): Date ranges of data to read.
            Rows carry a zero-based date range index when several ranges
            are requested; overlapping days appear in the rows of both
            ranges.  Must be unspecified in a cohort request.
        offset (int): Row count of the start row (the first row is row 0).
        limit (int): Number of rows to return; unspecified means 10,
            -1 means all rows.
        metric_aggregations (Sequence[MetricAggregation]): Aggregated metric
            values are shown in rows whose dimension_values are set to
            "RESERVED_(MetricAggregation)".
        dimension_filter (FilterExpression): Dimension filter clause;
            filtered dimensions must be requested, metrics cannot be used.
        metric_filter (FilterExpression): Post-aggregation metric filter
            (SQL HAVING analogue); dimensions cannot be used here.
        order_bys (Sequence[OrderBy]): How rows are ordered in the response.
        currency_code (str): ISO4217 code ("AED", "USD", "JPY"); empty
            falls back to the entity's default currency.
        cohort_spec (CohortSpec): Cohort group; when present the 'cohort'
            dimension must also be requested.
        keep_empty_rows (bool): If true, rows whose metrics are all 0 are
            returned (unless removed by a filter); otherwise omitted.
        return_property_quota (bool): Whether to return the property's
            current quota state (PropertyQuota).
    """

    entity = proto.Field(proto.MESSAGE, number=1, message=data.Entity,)
    dimensions = proto.RepeatedField(proto.MESSAGE, number=2, message=data.Dimension,)
    metrics = proto.RepeatedField(proto.MESSAGE, number=3, message=data.Metric,)
    date_ranges = proto.RepeatedField(proto.MESSAGE, number=4, message=data.DateRange,)
    offset = proto.Field(proto.INT64, number=5,)
    limit = proto.Field(proto.INT64, number=6,)
    metric_aggregations = proto.RepeatedField(
        proto.ENUM, number=7, enum=data.MetricAggregation,
    )
    dimension_filter = proto.Field(
        proto.MESSAGE, number=8, message=data.FilterExpression,
    )
    metric_filter = proto.Field(proto.MESSAGE, number=9, message=data.FilterExpression,)
    order_bys = proto.RepeatedField(proto.MESSAGE, number=10, message=data.OrderBy,)
    currency_code = proto.Field(proto.STRING, number=11,)
    cohort_spec = proto.Field(proto.MESSAGE, number=12, message=data.CohortSpec,)
    keep_empty_rows = proto.Field(proto.BOOL, number=13,)
    return_property_quota = proto.Field(proto.BOOL, number=14,)
class RunReportResponse(proto.Message):
    r"""The response report table corresponding to a request.

    Attributes:
        dimension_headers (Sequence[DimensionHeader]): Dimension columns;
            count and ordering match the dimensions present in ``rows``.
        metric_headers (Sequence[MetricHeader]): Metric columns; count and
            ordering match the metrics present in ``rows``.
        rows (Sequence[Row]): Dimension value combinations and metric
            values in the report.
        totals (Sequence[Row]): Totaled metric values, if requested.
        maximums (Sequence[Row]): Maximum metric values, if requested.
        minimums (Sequence[Row]): Minimum metric values, if requested.
        row_count (int): Total rows in the query result regardless of how
            many are returned here (e.g. 175 total with limit=50 yields
            row_count=175 but only 50 rows).
        metadata (ResponseMetaData): Metadata for the report.
        property_quota (PropertyQuota): This property's quota state
            including this request.
    """

    dimension_headers = proto.RepeatedField(
        proto.MESSAGE, number=11, message=data.DimensionHeader,
    )
    metric_headers = proto.RepeatedField(
        proto.MESSAGE, number=1, message=data.MetricHeader,
    )
    rows = proto.RepeatedField(proto.MESSAGE, number=2, message=data.Row,)
    totals = proto.RepeatedField(proto.MESSAGE, number=8, message=data.Row,)
    maximums = proto.RepeatedField(proto.MESSAGE, number=9, message=data.Row,)
    minimums = proto.RepeatedField(proto.MESSAGE, number=10, message=data.Row,)
    row_count = proto.Field(proto.INT32, number=12,)
    metadata = proto.Field(proto.MESSAGE, number=6, message=data.ResponseMetaData,)
    property_quota = proto.Field(proto.MESSAGE, number=7, message=data.PropertyQuota,)
class RunPivotReportRequest(proto.Message):
    r"""The request to generate a pivot report.

    Attributes:
        entity (Entity): Property whose events are tracked; within a batch
            request it must be unspecified or match the batch-level entity.
        dimensions (Sequence[Dimension]): Requested dimensions; every
            defined dimension must be used by one of dimension_expression,
            dimension_filter, pivots, or order_bys.
        metrics (Sequence[Metric]): Requested metrics (at least one); every
            defined metric must be used by one of metric_expression,
            metric_filter, or order_bys.
        dimension_filter (FilterExpression): Dimension filter clause;
            filtered dimensions must be requested, metrics cannot be used.
        metric_filter (FilterExpression): Post-aggregation metric filter
            (SQL HAVING analogue); dimensions cannot be used here.
        pivots (Sequence[Pivot]): Visual layout of dimensions in columns or
            rows.  The union of the pivots' field names must be a subset of
            the requested dimensions; no two pivots may share a dimension,
            and a dimension is only visible if it appears in a pivot.
        date_ranges (Sequence[DateRange]): Date ranges to retrieve event
            data for.  A special "dateRange" dimension may appear in a
            pivot's field names to compare between ranges.  Must be
            unspecified in a cohort request.
        currency_code (str): ISO4217 code ("AED", "USD", "JPY"); empty
            falls back to the entity's default currency.
        cohort_spec (CohortSpec): Cohort group; when present the 'cohort'
            dimension must also be requested.
        keep_empty_rows (bool): If true, rows whose metrics are all 0 are
            returned (unless removed by a filter); otherwise omitted.
        return_property_quota (bool): Whether to return the property's
            current quota state (PropertyQuota).
    """

    entity = proto.Field(proto.MESSAGE, number=1, message=data.Entity,)
    dimensions = proto.RepeatedField(proto.MESSAGE, number=2, message=data.Dimension,)
    metrics = proto.RepeatedField(proto.MESSAGE, number=3, message=data.Metric,)
    dimension_filter = proto.Field(
        proto.MESSAGE, number=4, message=data.FilterExpression,
    )
    metric_filter = proto.Field(proto.MESSAGE, number=5, message=data.FilterExpression,)
    pivots = proto.RepeatedField(proto.MESSAGE, number=6, message=data.Pivot,)
    date_ranges = proto.RepeatedField(proto.MESSAGE, number=7, message=data.DateRange,)
    currency_code = proto.Field(proto.STRING, number=8,)
    cohort_spec = proto.Field(proto.MESSAGE, number=9, message=data.CohortSpec,)
    keep_empty_rows = proto.Field(proto.BOOL, number=10,)
    return_property_quota = proto.Field(proto.BOOL, number=11,)
class RunPivotReportResponse(proto.Message):
    r"""The response pivot report table corresponding to a pivot
    request.

    Attributes:
        pivot_headers (Sequence[PivotHeader]): Summarizes the columns and
            rows created by each pivot: one header per pivot in the
            request, each holding the dimension-value combinations for that
            pivot (e.g. a ["country", "city"] pivot yields headers with
            country/city value pairs, an "eventName" pivot yields headers
            with one event name each).
        dimension_headers (Sequence[DimensionHeader]): Dimension columns;
            count and ordering match the dimensions present in ``rows``.
        metric_headers (Sequence[MetricHeader]): Metric columns; count and
            ordering match the metrics present in ``rows``.
        rows (Sequence[Row]): Dimension value combinations and metric
            values in the report.
        aggregates (Sequence[Row]): Aggregated metric values (totals,
            minimums, or maximums) controlled by the pivot's
            metric_aggregations; the aggregation type of each row is shown
            by dimension_values set to "RESERVED\_".
        metadata (ResponseMetaData): Metadata for the report.
        property_quota (PropertyQuota): This property's quota state
            including this request.
    """

    pivot_headers = proto.RepeatedField(
        proto.MESSAGE, number=1, message=data.PivotHeader,
    )
    dimension_headers = proto.RepeatedField(
        proto.MESSAGE, number=7, message=data.DimensionHeader,
    )
    metric_headers = proto.RepeatedField(
        proto.MESSAGE, number=2, message=data.MetricHeader,
    )
    rows = proto.RepeatedField(proto.MESSAGE, number=3, message=data.Row,)
    aggregates = proto.RepeatedField(proto.MESSAGE, number=4, message=data.Row,)
    metadata = proto.Field(proto.MESSAGE, number=5, message=data.ResponseMetaData,)
    property_quota = proto.Field(proto.MESSAGE, number=6, message=data.PropertyQuota,)
class BatchRunReportsRequest(proto.Message):
    r"""The batch request containing multiple report requests.

    Attributes:
        entity (google.analytics.data_v1alpha.types.Entity):
            A property whose events are tracked. This
            entity must be specified for the batch. The
            entity within RunReportRequest may either be
            unspecified or consistent with this entity.
        requests (Sequence[google.analytics.data_v1alpha.types.RunReportRequest]):
            Individual requests. Each request has a
            separate report response. Each batch request is
            allowed up to 5 requests.
    """

    entity = proto.Field(proto.MESSAGE, number=1, message=data.Entity,)
    # Forward reference by name: RunReportRequest is registered in this
    # module's proto manifest.
    requests = proto.RepeatedField(proto.MESSAGE, number=2, message="RunReportRequest",)
class BatchRunReportsResponse(proto.Message):
    r"""The batch response containing multiple reports.

    Attributes:
        reports (Sequence[google.analytics.data_v1alpha.types.RunReportResponse]):
            Individual responses. Each response has a
            separate report request.
    """

    # One response per request, in request order.
    reports = proto.RepeatedField(proto.MESSAGE, number=1, message="RunReportResponse",)
class BatchRunPivotReportsRequest(proto.Message):
    r"""The batch request containing multiple pivot report requests.

    Attributes:
        entity (google.analytics.data_v1alpha.types.Entity):
            A property whose events are tracked. This
            entity must be specified for the batch. The
            entity within RunPivotReportRequest may either
            be unspecified or consistent with this entity.
        requests (Sequence[google.analytics.data_v1alpha.types.RunPivotReportRequest]):
            Individual requests. Each request has a
            separate pivot report response. Each batch
            request is allowed up to 5 requests.
    """

    entity = proto.Field(proto.MESSAGE, number=1, message=data.Entity,)
    requests = proto.RepeatedField(
        proto.MESSAGE, number=2, message="RunPivotReportRequest",
    )
class BatchRunPivotReportsResponse(proto.Message):
    r"""The batch response containing multiple pivot reports.

    Attributes:
        pivot_reports (Sequence[google.analytics.data_v1alpha.types.RunPivotReportResponse]):
            Individual responses. Each response has a
            separate pivot report request.
    """

    # One pivot report per request, in request order.
    pivot_reports = proto.RepeatedField(
        proto.MESSAGE, number=1, message="RunPivotReportResponse",
    )
class GetMetadataRequest(proto.Message):
    r"""Request for a property's dimension and metric metadata.

    Attributes:
        name (str):
            Required. Resource name of the metadata to retrieve, specified
            in the URL path (not URL parameters), e.g.
            "properties/1234/metadata" where 1234 is a numeric GA4
            Property ID.  A Property ID of 0 returns only the dimensions
            and metrics common to all properties (no custom ones).
    """

    name = proto.Field(proto.STRING, number=1,)
class RunRealtimeReportRequest(proto.Message):
    r"""The request to generate a realtime report.

    Attributes:
        property (str): GA4 property identifier whose events are tracked,
            specified in the URL path (not the body), e.g.
            "properties/1234".
        dimensions (Sequence[Dimension]): The dimensions requested and
            displayed.
        metrics (Sequence[Metric]): The metrics requested and displayed.
        limit (int): Number of rows to return; unspecified means 10,
            -1 means all rows.
        dimension_filter (FilterExpression): Dimension filter clause;
            filtered dimensions must be requested, metrics cannot be used.
        metric_filter (FilterExpression): Post-aggregation metric filter
            (SQL HAVING analogue); dimensions cannot be used here.
        metric_aggregations (Sequence[MetricAggregation]): Aggregated metric
            values are shown in rows whose dimension_values are set to
            "RESERVED_(MetricAggregation)".
        order_bys (Sequence[OrderBy]): How rows are ordered in the response.
        return_property_quota (bool): Whether to return the property's
            current Realtime quota state (PropertyQuota).
    """

    property = proto.Field(proto.STRING, number=1,)
    dimensions = proto.RepeatedField(proto.MESSAGE, number=2, message=data.Dimension,)
    metrics = proto.RepeatedField(proto.MESSAGE, number=3, message=data.Metric,)
    limit = proto.Field(proto.INT64, number=4,)
    dimension_filter = proto.Field(
        proto.MESSAGE, number=5, message=data.FilterExpression,
    )
    metric_filter = proto.Field(proto.MESSAGE, number=6, message=data.FilterExpression,)
    metric_aggregations = proto.RepeatedField(
        proto.ENUM, number=7, enum=data.MetricAggregation,
    )
    order_bys = proto.RepeatedField(proto.MESSAGE, number=8, message=data.OrderBy,)
    return_property_quota = proto.Field(proto.BOOL, number=9,)
class RunRealtimeReportResponse(proto.Message):
    r"""The response realtime report table corresponding to a
    request.

    Attributes:
        dimension_headers (Sequence[DimensionHeader]): Dimension columns;
            count and ordering match the dimensions present in ``rows``.
        metric_headers (Sequence[MetricHeader]): Metric columns; count and
            ordering match the metrics present in ``rows``.
        rows (Sequence[Row]): Dimension value combinations and metric
            values in the report.
        totals (Sequence[Row]): Totaled metric values, if requested.
        maximums (Sequence[Row]): Maximum metric values, if requested.
        minimums (Sequence[Row]): Minimum metric values, if requested.
        row_count (int): Total rows in the query result regardless of how
            many are returned here (e.g. 175 total with limit=50 yields
            row_count=175 but only 50 rows).
        property_quota (PropertyQuota): This property's Realtime quota
            state including this request.
    """

    dimension_headers = proto.RepeatedField(
        proto.MESSAGE, number=1, message=data.DimensionHeader,
    )
    metric_headers = proto.RepeatedField(
        proto.MESSAGE, number=2, message=data.MetricHeader,
    )
    rows = proto.RepeatedField(proto.MESSAGE, number=3, message=data.Row,)
    totals = proto.RepeatedField(proto.MESSAGE, number=4, message=data.Row,)
    maximums = proto.RepeatedField(proto.MESSAGE, number=5, message=data.Row,)
    minimums = proto.RepeatedField(proto.MESSAGE, number=6, message=data.Row,)
    row_count = proto.Field(proto.INT32, number=7,)
    property_quota = proto.Field(proto.MESSAGE, number=8, message=data.PropertyQuota,)
__all__ = tuple(sorted(__protobuf__.manifest))
| 46.459016 | 115 | 0.66549 |
635ee72c2d406ab690ddfb461a43a82f0722c472 | 1,690 | py | Python | bin/multidoc_jsonl_dataset_to_parallel_dataset.py | chrishokamp/dynamic-transformer-ensembles | 2757c68936d6aca0ab06cb86e988cd96155a2b1c | [
"MIT"
] | 26 | 2020-06-17T01:33:00.000Z | 2021-11-22T07:05:36.000Z | bin/multidoc_jsonl_dataset_to_parallel_dataset.py | chrishokamp/dynamic-transformer-ensembles | 2757c68936d6aca0ab06cb86e988cd96155a2b1c | [
"MIT"
] | 2 | 2021-05-25T17:13:37.000Z | 2021-07-31T11:33:33.000Z | bin/multidoc_jsonl_dataset_to_parallel_dataset.py | chrishokamp/dynamic-transformer-ensembles | 2757c68936d6aca0ab06cb86e988cd96155a2b1c | [
"MIT"
] | 1 | 2020-06-17T19:49:54.000Z | 2020-06-17T19:49:54.000Z | #!/usr/bin/env python
# coding: utf-8
# In[ ]:
# flatten a multidoc summarization dataset in .jsonl format to a parallel dataset that uses
# the *.sources *.targets format from cnn-dm
# TODO: support shuffling since the cluster items will be sequential by default
# TODO: support formatting with special tokens to indicate document structure (i.e. <SEP> token between Title and Body)
# In[1]:
from pathlib import Path
import json
import tqdm
import numpy as np
from transformer_decoding.evaluate import article_to_text
# Flatten each multi-document summarization cluster into (article, summary)
# pairs and write them in the parallel *.sources / *.targets format used by
# cnn-dm: one example per line, line i of .sources aligned with line i of
# .targets.
DATADIR = Path('/home/chris/projects/aylien/dynamic-ensembles/data/WCEP')
prefixes = ['train', 'val']
shuffle = True
separator_token = ' [SEP] '

for dataset_prefix in prefixes:
    sources_and_targets = []
    cluster_cnt = 0
    print('loading clusters')
    # Each line of the .jsonl file is one cluster: a list of articles plus a
    # single reference summary shared by every article in that cluster.
    # Fixes vs. original: the file handle is closed via ``with`` (it was
    # leaked by the inline generator) and text I/O uses an explicit UTF-8
    # encoding instead of the platform default.
    with open(DATADIR / (dataset_prefix + '.jsonl'), encoding='utf-8') as clusters_file:
        for cluster in tqdm.tqdm(json.loads(l) for l in clusters_file):
            for article in cluster['articles']:
                sources_and_targets.append(
                    (
                        article_to_text(article, separator_token=separator_token),
                        cluster['summary'],
                    )
                )
            cluster_cnt += 1

    # Optionally shuffle so that examples from the same cluster are not
    # written consecutively (cluster items are sequential by default).
    output_idxs = np.arange(len(sources_and_targets))
    if shuffle:
        np.random.shuffle(output_idxs)

    with open(DATADIR / (dataset_prefix + '.sources'), 'w', encoding='utf-8') as srcs, open(
        DATADIR / (dataset_prefix + '.targets'), 'w', encoding='utf-8'
    ) as tgts:
        for idx in tqdm.tqdm(output_idxs):
            # NOTE(review): assumes neither source nor target text contains
            # embedded newlines, otherwise the line alignment between the
            # two files breaks -- confirm upstream.
            src, tgt = sources_and_targets[idx]
            srcs.write(f'{src}\n')
            tgts.write(f'{tgt}\n')
    print(f'wrote {len(sources_and_targets)} segments from {cluster_cnt} clusters to {srcs.name} and {tgts.name}')
| 26.40625 | 128 | 0.671006 |
896a7231c36268704d5c743e6992fec4e46e70fd | 21,730 | py | Python | tests/package/test_manager.py | ufo2011/platformio-core | 0ceae62701731f8b32c34d7993a34dea34aea59c | [
"Apache-2.0"
] | null | null | null | tests/package/test_manager.py | ufo2011/platformio-core | 0ceae62701731f8b32c34d7993a34dea34aea59c | [
"Apache-2.0"
] | null | null | null | tests/package/test_manager.py | ufo2011/platformio-core | 0ceae62701731f8b32c34d7993a34dea34aea59c | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=unused-argument
import logging
import os
import time
from pathlib import Path
import pytest
import semantic_version
from platformio import fs, util
from platformio.package.exception import (
MissingPackageManifestError,
UnknownPackageError,
)
from platformio.package.manager.library import LibraryPackageManager
from platformio.package.manager.platform import PlatformPackageManager
from platformio.package.manager.tool import ToolPackageManager
from platformio.package.meta import PackageSpec
from platformio.package.pack import PackagePacker
def test_download(isolated_pio_core):
    """Downloads are checksum-verified and cached; expired cache entries are
    garbage-collected together with their usage-DB record."""
    url = "https://github.com/platformio/platformio-core/archive/v4.3.4.zip"
    checksum = "69d59642cb91e64344f2cdc1d3b98c5cd57679b5f6db7accc7707bd4c5d9664a"
    lm = LibraryPackageManager()
    lm.set_log_level(logging.ERROR)
    archive_path = lm.download(url, checksum)
    assert fs.calculate_file_hashsum("sha256", archive_path) == checksum
    # A fresh (recently used) download must survive a cleanup pass.
    lm.cleanup_expired_downloads(time.time())
    assert os.path.isfile(archive_path)
    # test outdated downloads: back-date the archive's usage time just past
    # the cache TTL so the next cleanup pass removes it.
    lm.set_download_utime(archive_path, time.time() - lm.DOWNLOAD_CACHE_EXPIRE - 1)
    lm.cleanup_expired_downloads(time.time())
    assert not os.path.isfile(archive_path)
    # check that key is deleted from DB
    with open(lm.get_download_usagedb_path(), encoding="utf8") as fp:
        assert os.path.basename(archive_path) not in fp.read()
def test_find_pkg_root(isolated_pio_core, tmpdir_factory):
    """find_pkg_root locates the nested directory containing a manifest;
    library packages without a manifest fall back to source-layout detection
    and get a ``library.json`` generated for them."""
    # has manifest
    pkg_dir = tmpdir_factory.mktemp("package-has-manifest")
    root_dir = pkg_dir.join("nested").mkdir().join("folder").mkdir()
    root_dir.join("platform.json").write("")
    pm = PlatformPackageManager()
    found_dir = pm.find_pkg_root(str(pkg_dir), spec=None)
    assert os.path.realpath(str(root_dir)) == os.path.realpath(found_dir)

    # does not have manifest
    pkg_dir = tmpdir_factory.mktemp("package-does-not-have-manifest")
    pkg_dir.join("nested").mkdir().join("folder").mkdir().join("readme.txt").write("")
    pm = PlatformPackageManager()
    with pytest.raises(MissingPackageManifestError):
        pm.find_pkg_root(str(pkg_dir), spec=None)

    # library package without manifest, should find source root
    # (detected via the conventional src/ + include/ layout)
    pkg_dir = tmpdir_factory.mktemp("library-package-without-manifest")
    root_dir = pkg_dir.join("nested").mkdir().join("folder").mkdir()
    root_dir.join("src").mkdir().join("main.cpp").write("")
    root_dir.join("include").mkdir().join("main.h").write("")
    assert os.path.realpath(str(root_dir)) == os.path.realpath(
        LibraryPackageManager.find_library_root(str(pkg_dir))
    )
    # library manager should create "library.json"
    lm = LibraryPackageManager()
    spec = PackageSpec("custom-name@1.0.0")
    pkg_root = lm.find_pkg_root(str(pkg_dir), spec)
    manifest_path = os.path.join(pkg_root, "library.json")
    assert os.path.realpath(str(root_dir)) == os.path.realpath(pkg_root)
    assert os.path.isfile(manifest_path)
    manifest = lm.load_manifest(pkg_root)
    # The generated manifest takes its name from the spec but a fallback
    # 0.0.0-based version, not the spec's 1.0.0.
    assert manifest["name"] == "custom-name"
    assert "0.0.0" in str(manifest["version"])
def test_build_legacy_spec(isolated_pio_core, tmpdir_factory):
    """A PackageSpec can be rebuilt from a legacy ``.piopkgmanager.json`` src
    manifest or from a regular package manifest; a bare directory fails."""
    storage_dir = tmpdir_factory.mktemp("storage")
    pm = PlatformPackageManager(str(storage_dir))

    # test src manifest (legacy pre-v5 installation record)
    pkg1_dir = storage_dir.join("pkg-1").mkdir()
    pkg1_dir.join(".pio").mkdir().join(".piopkgmanager.json").write(
        """
{
  "name": "StreamSpy-0.0.1.tar",
  "url": "https://dl.platformio.org/e8936b7/StreamSpy-0.0.1.tar.gz",
  "requirements": null
}
"""
    )
    assert pm.build_legacy_spec(str(pkg1_dir)) == PackageSpec(
        name="StreamSpy-0.0.1.tar",
        uri="https://dl.platformio.org/e8936b7/StreamSpy-0.0.1.tar.gz",
    )

    # without src manifest
    pkg2_dir = storage_dir.join("pkg-2").mkdir()
    pkg2_dir.join("main.cpp").write("")
    with pytest.raises(MissingPackageManifestError):
        pm.build_legacy_spec(str(pkg2_dir))

    # with package manifest (only the name survives into the spec)
    pkg3_dir = storage_dir.join("pkg-3").mkdir()
    pkg3_dir.join("platform.json").write('{"name": "pkg3", "version": "1.2.0"}')
    assert pm.build_legacy_spec(str(pkg3_dir)) == PackageSpec(name="pkg3")
def test_build_metadata(isolated_pio_core, tmpdir_factory):
    """Metadata can only be built once a manifest exists; an optional VCS
    revision is appended to the version as semver build metadata."""
    manager = PlatformPackageManager()
    vcs_revision = "a2ebfd7c0f"
    package_dir = tmpdir_factory.mktemp("package")

    # Without a manifest both loading and metadata building must fail.
    with pytest.raises(MissingPackageManifestError):
        manager.load_manifest(str(package_dir))
    with pytest.raises(MissingPackageManifestError):
        manager.build_metadata(str(package_dir), PackageSpec("MyLib"))

    # Once a manifest is present, its name/version take precedence over the
    # spec's package name.
    package_dir.join("platform.json").write(
        '{"name": "Dev-Platform", "version": "1.2.3-alpha.1"}'
    )
    meta = manager.build_metadata(str(package_dir), PackageSpec("owner/platform-name"))
    assert meta.name == "Dev-Platform"
    assert str(meta.version) == "1.2.3-alpha.1"

    # A VCS revision is encoded as "+sha.<revision>" build metadata.
    meta = manager.build_metadata(
        str(package_dir), PackageSpec("owner/platform-name"), vcs_revision
    )
    assert str(meta.version) == ("1.2.3-alpha.1+sha." + vcs_revision)
    assert meta.version.build[1] == vcs_revision
def test_install_from_uri(isolated_pio_core, tmpdir_factory):
    """Packages can be installed from a local directory, a packed archive,
    and via install_from_uri with a registry-style spec."""
    tmp_dir = tmpdir_factory.mktemp("tmp")
    storage_dir = tmpdir_factory.mktemp("storage")
    lm = LibraryPackageManager(str(storage_dir))
    lm.set_log_level(logging.ERROR)

    # install from local directory: no manifest, so name falls back to the
    # directory name and the version to a 0.0.0+... placeholder.
    src_dir = tmp_dir.join("local-lib-dir").mkdir()
    src_dir.join("main.cpp").write("")
    spec = PackageSpec("file://%s" % src_dir)
    pkg = lm.install(spec)
    assert os.path.isfile(os.path.join(pkg.path, "main.cpp"))
    manifest = lm.load_manifest(pkg)
    assert manifest["name"] == "local-lib-dir"
    assert manifest["version"].startswith("0.0.0+")
    assert spec == pkg.metadata.spec

    # install from local archive: name/version come from library.json inside
    # the packed tarball.
    src_dir = tmp_dir.join("archive-src").mkdir()
    root_dir = src_dir.mkdir("root")
    root_dir.mkdir("src").join("main.cpp").write("#include <stdio.h>")
    root_dir.join("library.json").write(
        '{"name": "manifest-lib-name", "version": "2.0.0"}'
    )
    tarball_path = PackagePacker(str(src_dir)).pack(str(tmp_dir))
    spec = PackageSpec("file://%s" % tarball_path)
    pkg = lm.install(spec)
    assert os.path.isfile(os.path.join(pkg.path, "src", "main.cpp"))
    assert pkg == lm.get_package(spec)
    assert spec == pkg.metadata.spec

    # install from registry (simulated with a file:// URI plus a spec whose
    # requirement ^5 must be satisfied by the manifest's 5.2.7)
    src_dir = tmp_dir.join("registry-1").mkdir()
    src_dir.join("library.properties").write(
        """
name = wifilib
version = 5.2.7
"""
    )
    spec = PackageSpec("company/wifilib @ ^5")
    pkg = lm.install_from_uri("file://%s" % src_dir, spec)
    assert str(pkg.metadata.version) == "5.2.7"

    # check package folder names
    lm.memcache_reset()
    assert ["local-lib-dir", "manifest-lib-name", "wifilib"] == [
        os.path.basename(pkg.path) for pkg in lm.get_installed()
    ]
def test_install_from_registry(isolated_pio_core, tmpdir_factory):
    """Registry installs resolve dependencies, owner-qualified names, and
    system compatibility; unknown packages raise UnknownPackageError.

    NOTE(review): this test talks to the live PlatformIO registry, so the
    hard-coded installed-package counts depend on current registry state.
    """
    # Libraries
    lm = LibraryPackageManager(str(tmpdir_factory.mktemp("lib-storage")))
    lm.set_log_level(logging.ERROR)
    # library with dependencies (1 package + 2 transitive deps = 3)
    lm.install("AsyncMqttClient-esphome @ 0.8.6")
    assert len(lm.get_installed()) == 3
    pkg = lm.get_package("AsyncTCP-esphome")
    assert pkg.metadata.spec.owner == "esphome"
    assert not lm.get_package("non-existing-package")
    # mbed library
    assert lm.install("wolfSSL")
    assert len(lm.get_installed()) == 4
    # case sensitive author name
    assert lm.install("DallasTemperature")
    assert lm.get_package("OneWire").metadata.version.major >= 2
    assert len(lm.get_installed()) == 6

    # test conflicted names: two different owners publish "IRremote" and
    # both must be installable side by side.
    lm = LibraryPackageManager(str(tmpdir_factory.mktemp("conflicted-storage")))
    lm.set_log_level(logging.ERROR)
    lm.install("z3t0/IRremote@2.6.1")
    lm.install("mbed-yuhki50/IRremote")
    assert len(lm.get_installed()) == 2

    # Tools: the resolved tool package must declare the current system type.
    tm = ToolPackageManager(str(tmpdir_factory.mktemp("tool-storage")))
    tm.set_log_level(logging.ERROR)
    pkg = tm.install("platformio/tool-stlink @ ~1.10400.0")
    manifest = tm.load_manifest(pkg)
    assert tm.is_system_compatible(manifest.get("system"))
    assert util.get_systype() in manifest.get("system", [])

    # Test unknown
    with pytest.raises(UnknownPackageError):
        tm.install("unknown-package-tool @ 9.1.1")
    with pytest.raises(UnknownPackageError):
        tm.install("owner/unknown-package-tool")
def test_install_lib_depndencies(isolated_pio_core, tmpdir_factory):
    """Installing a local library pulls in its declared dependencies, both
    registry-based (owner/name) and VCS-URL-based.

    NOTE(review): "depndencies" is a typo for "dependencies"; the name is
    kept because renaming a test function is a suite-wide decision.
    """
    tmp_dir = tmpdir_factory.mktemp("tmp")

    src_dir = tmp_dir.join("lib-with-deps").mkdir()
    root_dir = src_dir.mkdir("root")
    root_dir.mkdir("src").join("main.cpp").write("#include <stdio.h>")
    root_dir.join("library.json").write(
        """
{
  "name": "lib-with-deps",
  "version": "2.0.0",
  "dependencies": [
    {
      "owner": "bblanchon",
      "name": "ArduinoJson",
      "version": "^6.16.1"
    },
    {
      "name": "external-repo",
      "version": "https://github.com/milesburton/Arduino-Temperature-Control-Library.git#4a0ccc1"
    }
  ]
}
"""
    )
    lm = LibraryPackageManager(str(tmpdir_factory.mktemp("lib-storage")))
    lm.set_log_level(logging.ERROR)
    lm.install("file://%s" % str(src_dir))
    installed = lm.get_installed()
    # lib-with-deps + ArduinoJson + external-repo + OneWire (a transitive
    # dependency of the external repo).
    assert len(installed) == 4
    assert set(["external-repo", "ArduinoJson", "lib-with-deps", "OneWire"]) == set(
        p.metadata.name for p in installed
    )
def test_install_force(isolated_pio_core, tmpdir_factory):
    """``force=True`` re-installs a package even when one already satisfies it.

    Without ``force`` the manager keeps the installed version as long as it
    satisfies the spec; with ``force`` it re-resolves the spec against the
    registry and replaces the old package in place.
    """
    lm = LibraryPackageManager(str(tmpdir_factory.mktemp("lib-storage")))
    lm.set_log_level(logging.ERROR)
    # install #64 (ArduinoJson) pinned to the 5.x line
    pkg = lm.install("64 @ ^5")
    assert pkg.metadata.version.major == 5
    # installing the bare id again is a no-op: the existing 5.x satisfies it
    pkg = lm.install("64")
    assert pkg.metadata.version.major == 5
    assert len(lm.get_installed()) == 1
    # force re-install resolves to the latest release (major > 5), replacing
    # the previously installed package (still only one package on disk).
    # Fix: pass the spec as a string ("64"), consistent with every other
    # install() call in this module, instead of a bare int.
    pkg = lm.install("64", force=True)
    assert len(lm.get_installed()) == 1
    assert pkg.metadata.version.major > 5
def test_symlink(tmp_path: Path):
    """Packages installed via ``symlink://`` are linked, not copied."""
    # External package living outside the storage dir.
    external_pkg_dir = tmp_path / "External"
    external_pkg_dir.mkdir()
    (external_pkg_dir / "library.json").write_text(
        """
{
"name": "External",
"version": "1.0.0"
}
"""
    )
    # A regular (copied) package already present inside the storage dir.
    storage_dir = tmp_path / "storage"
    installed_pkg_dir = storage_dir / "installed"
    installed_pkg_dir.mkdir(parents=True)
    (installed_pkg_dir / "library.json").write_text(
        """
{
"name": "Installed",
"version": "1.0.0"
}
"""
    )
    spec = "CustomExternal=symlink://%s" % str(external_pkg_dir)
    lm = LibraryPackageManager(str(storage_dir))
    lm.set_log_level(logging.ERROR)
    pkg = lm.install(spec)
    # A ".pio-link" marker file is created instead of copying the package;
    # the marker is named after the spec alias ("CustomExternal").
    assert os.path.isfile(str(storage_dir / "CustomExternal.pio-link"))
    assert pkg.metadata.name == "External"
    assert pkg.metadata.version.major == 1
    assert ["External", "Installed"] == [
        pkg.metadata.name for pkg in lm.get_installed()
    ]
    pkg = lm.get_package("External")
    # The package path resolves through the link to the original directory.
    assert Path(pkg.path) == external_pkg_dir
    assert pkg.metadata.spec.uri.startswith("symlink://")
    assert lm.get_package(spec).metadata.spec.uri.startswith("symlink://")
    # exercise update on a symlinked package (result is not asserted)
    lm.update(pkg)
    # uninstall removes the link from the storage ...
    lm.uninstall("External")
    assert ["Installed"] == [pkg.metadata.name for pkg in lm.get_installed()]
    # ... but the original (linked-to) package directory must survive
    assert external_pkg_dir.is_dir()
    # install again without an alias (marker named after the package itself),
    # then delete the link target from disk: after a cache reset the dangling
    # link must make the package disappear from get_installed().
    assert lm.install("symlink://%s" % str(external_pkg_dir))
    assert os.path.isfile(str(storage_dir / "External.pio-link"))
    assert ["External", "Installed"] == [
        pkg.metadata.name for pkg in lm.get_installed()
    ]
    fs.rmtree(str(external_pkg_dir))
    lm.memcache_reset()
    assert ["Installed"] == [pkg.metadata.name for pkg in lm.get_installed()]
def test_scripts(isolated_pio_core, tmp_path: Path):
    """Manifest-declared lifecycle scripts run on install/uninstall.

    The package declares a "postinstall" script (run with no extra argv) and
    a "preuninstall2" entry invoked with an explicit "preuninstall" argument;
    each run drops an ``<action>.flag`` marker file (the preuninstall flag is
    written one directory up, i.e. into the storage dir, because the package
    directory itself is about to be removed).
    """
    pkg_dir = tmp_path / "foo"
    scripts_dir = pkg_dir / "scripts"
    scripts_dir.mkdir(parents=True)
    # Fix: the embedded script's "if" body must be indented to be valid
    # Python (the flat original would raise IndentationError when executed).
    (scripts_dir / "script.py").write_text(
        """
import sys
from pathlib import Path
action = "postinstall" if len(sys.argv) == 1 else sys.argv[1]
Path("%s.flag" % action).touch()
if action == "preuninstall":
    Path("../%s.flag" % action).touch()
"""
    )
    (pkg_dir / "library.json").write_text(
        """
{
"name": "foo",
"version": "1.0.0",
"scripts": {
"postinstall": "scripts/script.py",
"preuninstall2": ["scripts/script.py", "preuninstall"]
}
}
"""
    )
    storage_dir = tmp_path / "storage"
    lm = LibraryPackageManager(str(storage_dir))
    lm.set_log_level(logging.ERROR)
    lm.install("file://%s" % str(pkg_dir))
    assert os.path.isfile(os.path.join(lm.get_package("foo").path, "postinstall.flag"))
    lm.uninstall("foo")
    # Fix: the original called .is_file() without asserting the result, so
    # the preuninstall hook was never actually verified.
    assert (storage_dir / "preuninstall.flag").is_file()
def test_install_circular_dependencies(tmp_path: Path):
    """Mutually dependent libraries must not send installation into a loop."""

    manifest_tpl = """
{
"name": "%s",
"version": "1.0.0",
"dependencies": {
%s
}
}
"""

    def _write_manifest(pkg_dir, lib_name, deps):
        # Render a minimal library.json with the given (name, requirement)
        # dependency pairs into a freshly created package directory.
        pkg_dir.mkdir(parents=True)
        dep_lines = ",\n".join('"%s": "%s"' % pair for pair in deps)
        (pkg_dir / "library.json").write_text(manifest_tpl % (lib_name, dep_lines))

    storage_dir = tmp_path / "storage"
    # Foo and Bar depend on each other, forming a cycle.
    _write_manifest(storage_dir / "foo", "Foo", [("Bar", "*")])
    _write_manifest(storage_dir / "bar", "Bar", [("Foo", "*")])

    lm = LibraryPackageManager(str(storage_dir))
    lm.set_log_level(logging.ERROR)
    assert len(lm.get_installed()) == 2

    # A root library depending on both members of the cycle.
    root_dir = tmp_path / "root"
    _write_manifest(root_dir, "Root", [("Foo", "^1.0.0"), ("Bar", "^1.0.0")])
    # Must terminate without infinite dependency recursion.
    lm.install("file://%s" % str(root_dir))
def test_get_installed(isolated_pio_core, tmpdir_factory):
    """get_installed() discovers packages with or without .piopm metadata
    and filters out system-incompatible or wrong-manifest packages."""
    storage_dir = tmpdir_factory.mktemp("storage")
    pm = ToolPackageManager(str(storage_dir))
    # VCS package: legacy metadata describing a git-sourced tool.
    # NOTE(review): the chained py.path calls place ".piopm" inside the
    # ".git" directory -- presumably the manager still detects it (the
    # assertions below require it); confirm against the manager's scan logic.
    (
        storage_dir.join("pkg-vcs")
        .mkdir()
        .join(".git")
        .mkdir()
        .join(".piopm")
        .write(
            """
{
"name": "pkg-via-vcs",
"spec": {
"id": null,
"name": "pkg-via-vcs",
"owner": null,
"requirements": null,
"url": "git+https://github.com/username/repo.git"
},
"type": "tool",
"version": "0.0.0+sha.1ea4d5e"
}
"""
        )
    )
    # package without metadata file: name/version are read from package.json;
    # the "@3.4.5" directory suffix is the detached-copy naming convention
    # (see test_uninstall) -- TODO confirm
    (
        storage_dir.join("foo@3.4.5")
        .mkdir()
        .join("package.json")
        .write('{"name": "foo", "version": "3.4.5"}')
    )
    # package with a .piopm metadata file alongside its manifest
    foo_dir = storage_dir.join("foo").mkdir()
    foo_dir.join("package.json").write('{"name": "foo", "version": "3.6.0"}')
    foo_dir.join(".piopm").write(
        """
{
"name": "foo",
"spec": {
"name": "foo",
"owner": null,
"requirements": "^3"
},
"type": "tool",
"version": "3.6.0"
}
"""
    )
    # test "system" filtering: only the package whose "system" matches the
    # current host (util.get_systype()) may be reported
    storage_dir.join("pkg-incompatible-system").mkdir().join("package.json").write(
        '{"name": "check-system", "version": "4.0.0", "system": ["unknown"]}'
    )
    storage_dir.join("pkg-compatible-system").mkdir().join("package.json").write(
        '{"name": "check-system", "version": "3.0.0", "system": "%s"}'
        % util.get_systype()
    )
    # invalid package: a *tool* manager must ignore a library manifest
    storage_dir.join("invalid-package").mkdir().join("library.json").write(
        '{"name": "SomeLib", "version": "4.0.0"}'
    )
    installed = pm.get_installed()
    # 4 = pkg-via-vcs + foo + foo@3.4.5 + the compatible check-system
    assert len(installed) == 4
    assert set(["pkg-via-vcs", "foo", "check-system"]) == set(
        p.metadata.name for p in installed
    )
    assert str(pm.get_package("foo").metadata.version) == "3.6.0"
    assert str(pm.get_package("check-system").metadata.version) == "3.0.0"
def test_uninstall(isolated_pio_core, tmpdir_factory):
    """Uninstalling detaches versioned copies and can follow dependencies."""
    tmp_dir = tmpdir_factory.mktemp("tmp")
    storage_dir = tmpdir_factory.mktemp("storage")
    lm = LibraryPackageManager(str(storage_dir))
    lm.set_log_level(logging.ERROR)
    # foo @ 1.0.0
    pkg_dir = tmp_dir.join("foo").mkdir()
    pkg_dir.join("library.json").write('{"name": "foo", "version": "1.0.0"}')
    foo_1_0_0_pkg = lm.install_from_uri("file://%s" % pkg_dir, "foo")
    # foo @ 1.3.0 -- a second version of the same library; installing it
    # detaches the 1.0.0 copy into a "foo@1.0.0" directory (asserted below)
    pkg_dir = tmp_dir.join("foo-1.3.0").mkdir()
    pkg_dir.join("library.json").write('{"name": "foo", "version": "1.3.0"}')
    lm.install_from_uri("file://%s" % pkg_dir, "foo")
    # bar
    pkg_dir = tmp_dir.join("bar").mkdir()
    pkg_dir.join("library.json").write('{"name": "bar", "version": "1.0.0"}')
    bar_pkg = lm.install("file://%s" % pkg_dir)
    assert len(lm.get_installed()) == 3
    assert os.path.isdir(os.path.join(str(storage_dir), "foo"))
    assert os.path.isdir(os.path.join(str(storage_dir), "foo@1.0.0"))
    # check detaching: lookup is case-insensitive ("FOO"); after removing the
    # active copy the "foo" dir still exists and the "@1.0.0" dir is gone --
    # presumably the detached copy is moved back into "foo" (the later
    # uninstall by foo_1_0_0_pkg.path relies on this)
    assert lm.uninstall("FOO")
    assert len(lm.get_installed()) == 2
    assert os.path.isdir(os.path.join(str(storage_dir), "foo"))
    assert not os.path.isdir(os.path.join(str(storage_dir), "foo@1.0.0"))
    # uninstall the rest: by filesystem path, then by package object
    assert lm.uninstall(foo_1_0_0_pkg.path)
    assert lm.uninstall(bar_pkg)
    assert not lm.get_installed()
    # test dependency handling: a registry package that pulls in 2 deps;
    # skip_dependencies leaves them behind, default removes them too
    assert lm.install("AsyncMqttClient-esphome @ 0.8.4")
    assert len(lm.get_installed()) == 3
    assert lm.uninstall("AsyncMqttClient-esphome", skip_dependencies=True)
    assert len(lm.get_installed()) == 2
    lm = LibraryPackageManager(str(storage_dir))
    lm.set_log_level(logging.ERROR)
    assert lm.install("AsyncMqttClient-esphome @ 0.8.4")
    assert lm.uninstall("AsyncMqttClient-esphome")
    assert not lm.get_installed()
def test_registry(isolated_pio_core):
    """Registry lookups: resolve package IDs and fetch package metadata."""
    lm = LibraryPackageManager()
    lm.set_log_level(logging.ERROR)
    # reveal ID: a spec carrying an explicit id is returned as-is; a bare
    # name is resolved against the live registry (OneWire == id 1)
    assert lm.reveal_registry_package_id(PackageSpec(id=13)) == 13
    assert lm.reveal_registry_package_id(PackageSpec(name="OneWire")) == 1
    with pytest.raises(UnknownPackageError):
        lm.reveal_registry_package_id(PackageSpec(name="/non-existing-package/"))
    # fetch package data by id and by name
    assert lm.fetch_registry_package(PackageSpec(id=1))["name"] == "OneWire"
    assert lm.fetch_registry_package(PackageSpec(name="ArduinoJson"))["id"] == 64
    # when both id and name are given, the id takes precedence: id 13 wins
    # over the stale "Renamed library" name
    assert (
        lm.fetch_registry_package(
            PackageSpec(id=13, owner="adafruit", name="Renamed library")
        )["name"]
        == "Adafruit GFX Library"
    )
    with pytest.raises(UnknownPackageError):
        lm.fetch_registry_package(
            PackageSpec(owner="unknown<>owner", name="/non-existing-package/")
        )
    with pytest.raises(UnknownPackageError):
        lm.fetch_registry_package(PackageSpec(name="/non-existing-package/"))
def test_update_with_metadata(isolated_pio_core, tmpdir_factory):
    """outdated()/update() honor the installed spec and "wanted" requirements."""
    storage_dir = tmpdir_factory.mktemp("storage")
    lm = LibraryPackageManager(str(storage_dir))
    lm.set_log_level(logging.ERROR)
    # non-SemVer versions published in the registry still resolve/compare
    pkg = lm.install("adafruit/Adafruit NeoPixel @ <1.9")
    outdated = lm.outdated(pkg)
    assert str(outdated.current) == "1.8.7"
    assert outdated.latest > semantic_version.Version("1.10.0")
    pkg = lm.install("ArduinoJson @ 5.10.1")
    # test latest: without an extra spec there is no "wanted" version,
    # only the newest registry release (a 6.x or newer line)
    outdated = lm.outdated(pkg)
    assert str(outdated.current) == "5.10.1"
    assert outdated.wanted is None
    assert outdated.latest > outdated.current
    assert outdated.latest > semantic_version.Version("5.99.99")
    # test wanted: "~5" pins "wanted" to the newest 5.x release (5.13.4)
    # while "latest" still points past the 6.16 line
    outdated = lm.outdated(pkg, PackageSpec("ArduinoJson@~5"))
    assert str(outdated.current) == "5.10.1"
    assert str(outdated.wanted) == "5.13.4"
    assert outdated.latest > semantic_version.Version("6.16.0")
    # update to the wanted 5.x
    new_pkg = lm.update("ArduinoJson@^5", PackageSpec("ArduinoJson@^5"))
    assert str(new_pkg.metadata.version) == "5.13.4"
    # check that the old version is removed (NeoPixel + ArduinoJson remain)
    assert len(lm.get_installed()) == 2
    # update to the latest using a fresh manager instance
    lm = LibraryPackageManager(str(storage_dir))
    lm.set_log_level(logging.ERROR)
    pkg = lm.update("ArduinoJson")
    assert pkg.metadata.version == outdated.latest
def test_update_without_metadata(isolated_pio_core, tmpdir_factory):
    """Legacy installs (no .piopm metadata) can still be checked and updated."""
    storage_dir = tmpdir_factory.mktemp("storage")
    # hand-made legacy packages: manifest only, no .piopm metadata file
    storage_dir.join("legacy-package").mkdir().join("library.json").write(
        '{"name": "AsyncMqttClient-esphome", "version": "0.8"}'
    )
    storage_dir.join("legacy-dep").mkdir().join("library.json").write(
        '{"name": "AsyncTCP-esphome", "version": "1.1.1"}'
    )
    lm = LibraryPackageManager(str(storage_dir))
    pkg = lm.get_package("AsyncMqttClient-esphome")
    outdated = lm.outdated(pkg)
    assert len(lm.get_installed()) == 2
    # the incomplete "0.8" is normalized to a full SemVer "0.8.0"
    assert str(pkg.metadata.version) == "0.8.0"
    assert outdated.latest > semantic_version.Version("0.8.0")
    # update with a fresh manager: pulls the new version plus its
    # dependencies, growing the storage from 2 to 4 packages
    lm = LibraryPackageManager(str(storage_dir))
    lm.set_log_level(logging.ERROR)
    new_pkg = lm.update(pkg)
    assert len(lm.get_installed()) == 4
    assert new_pkg.metadata.spec.owner == "ottowinter"
| 33.2263 | 97 | 0.66231 |
4b6311ffbe983f8a859e4537c87644c018a7c040 | 5,999 | py | Python | toollib/webdriver.py | atpuxiner/toollib | a895daeba49e64022a4a11a67a8b7b7a44cd2e0f | [
"MIT"
] | 113 | 2021-12-15T05:23:13.000Z | 2022-03-30T10:29:13.000Z | toollib/webdriver.py | atpuxiner/toollib | a895daeba49e64022a4a11a67a8b7b7a44cd2e0f | [
"MIT"
] | null | null | null | toollib/webdriver.py | atpuxiner/toollib | a895daeba49e64022a4a11a67a8b7b7a44cd2e0f | [
"MIT"
] | 3 | 2022-02-22T01:43:44.000Z | 2022-03-21T01:05:49.000Z | """
@author axiner
@version v1.0.0
@created 2022/1/18 21:05
@abstract web驱动
@description
@history
"""
import os
import re
import shutil
import sys
import typing as t
import urllib.request as urlrequest
import winreg
from pathlib import Path
from toollib.validator import choicer
try:
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.chromium.service import ChromiumService
from selenium.webdriver.chromium.webdriver import ChromiumDriver
except ImportError:
raise
__all__ = ['ChromeDriver']
class ChromeDriver(ChromiumDriver):
    """
    Chrome webdriver (built on selenium's ChromiumDriver).

    - Downloads a matching chromedriver automatically. The download is
      skipped when ``driver_dir`` already contains a driver whose version
      matches the requested browser version.

    Usage:
        # 1) No browser version given: use the locally installed browser's
        #    version (read from the Windows registry; on mac/linux the
        #    latest release is downloaded instead).
        driver = ChromeDriver()
        driver.get('https://www.baidu.com/')
        # 2) Explicit browser version (shown on the browser's About page);
        #    driver_dir is where the chromedriver binary is stored.
        driver = ChromeDriver(driver_dir='D:/tmp', version='96.0.4664.45')
        driver.get('https://www.baidu.com/')

    See the constructor parameters and the source for more details.
    """

    # NOTE(review): the module imports "winreg" unconditionally, so despite
    # the mac64/linux64 platform options this module only imports on Windows.

    def __init__(self, driver_dir: t.Union[str, Path] = '.', version: str = 'LATEST_RELEASE',
                 platform: str = 'win64',
                 port=0, options: Options = None,
                 service_args: t.List[str] = None, desired_capabilities=None,
                 service_log_path=None, env: dict = None, start_error_message: str = None,
                 service: ChromiumService = None, keep_alive=None):
        """
        Chrome webdriver.

        :param driver_dir: directory for the chromedriver binary
            (default: current working directory)
        :param version: Chrome browser version (default: latest release)
        :param platform: target platform (default 'win64'); one of
            ['win32', 'win64', 'mac64', 'linux64']
        :param port: forwarded to ChromiumService
        :param options: forwarded to ChromiumDriver
        :param service_args: forwarded to ChromiumService
        :param desired_capabilities: forwarded to ChromiumDriver
        :param service_log_path: forwarded to ChromiumService
        :param env: forwarded to ChromiumService
        :param start_error_message: hint shown when the service fails to start
        :param service: pre-built service; when given, no download is attempted
        :param keep_alive: forwarded to ChromiumDriver
        """
        if not service:
            if not start_error_message:
                start_error_message = 'Please see https://sites.google.com/a/chromium.org/chromedriver/home'
            # Ensure a usable chromedriver binary exists (downloads on demand).
            executable_path = self.__download_driver(driver_dir, version, platform)
            service = ChromiumService(
                executable_path, port, service_args, service_log_path, env, start_error_message)
        super(ChromeDriver, self).__init__(
            'chrome', 'goog', port, options, service_args,
            desired_capabilities, service_log_path, service, keep_alive)

    @classmethod
    def __download_driver(cls, driver_dir: str, version: str, platform: str) -> str:
        """Return the path of a chromedriver matching *version*, downloading
        it from chromedriver.storage.googleapis.com if necessary."""
        driver_dir = Path(driver_dir).absolute()
        if driver_dir.is_file():
            raise TypeError('"driver_dir" is dir')
        else:
            driver_dir.mkdir(parents=True, exist_ok=True)
        platform = choicer(
            platform,
            choices=['win32', 'win64', 'mac64', 'linux64'],
            lable='platform')
        if platform.startswith('win'):
            # Google publishes a single win32 archive for both Windows arches;
            # the browser version is resolved from the registry on Windows.
            platform = 'win32'
            version = cls.__get_version(version)
            exec_file = 'chromedriver.exe'
        else:
            # NOTE(review): on mac/linux the version string is used as-is
            # (no local-browser detection/normalization).
            exec_file = 'chromedriver'
        driver_file = driver_dir.joinpath(exec_file)
        if driver_file.is_file():
            # Reuse an existing binary when its version matches.
            if cls.__check_driver_version(driver_file, version, platform):
                return driver_file.as_posix()
        __version = cls.__find_similar_version(version)
        if not __version:
            raise ValueError('This version may not exist')
        __driver_zip = driver_dir.joinpath(f'chromedriver_{platform}.zip')
        __download_url = f'https://chromedriver.storage.googleapis.com/' \
                         f'{__version}/{__driver_zip.name}'
        try:
            sys.stdout.write(f'Download driver({__driver_zip.stem}) start.....')
            urlrequest.urlretrieve(__download_url, __driver_zip.as_posix())
            shutil.unpack_archive(__driver_zip, driver_dir, 'zip')
            os.remove(__driver_zip)
            return driver_file.as_posix()
        except:
            raise

    @staticmethod
    def __get_version(version: str) -> str:
        """Resolve the browser version (Windows only), reading the installed
        Chrome version from the registry when none was given; the result is
        truncated to at most "major.minor.build"."""
        if version == 'LATEST_RELEASE' or not version:
            try:
                key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, r'Software\Google\Chrome\BLBeacon')
                value, _type = winreg.QueryValueEx(key, 'version')
                version = value or 'LATEST_RELEASE'
            except Exception as err:
                # Registry lookup failed: report it and fall back to the
                # latest release marker.
                sys.stdout.write(str(err))
                sys.stdout.write('将版本赋值为最新版本"LATEST_RELEASE"')
                version = 'LATEST_RELEASE'
        # "LATEST_RELEASE" contains no dots, so it passes through unchanged.
        version = '.'.join(version.split('.')[:3])
        return version

    @staticmethod
    def __check_driver_version(driver_file: str, version: str, platform: str) -> bool:
        """Return True when the existing driver binary reports a version
        compatible with *version* (dotted-prefix match)."""
        # NOTE(review): on non-Windows platforms no check is performed, so
        # any existing binary is accepted as compatible.
        is_eq = True
        try:
            if platform.startswith('win'):
                # "chromedriver --version" prints e.g.
                # "ChromeDriver 96.0.4664.45 (...)"; take major.minor.build.
                outstd = os.popen(f'{driver_file} --version').read()
                cv_split = outstd.split()[1].split('.')[:3]
                v_split = version.split('.')
                if cv_split != v_split:
                    if len(v_split) > 1:
                        is_eq = '.'.join(cv_split).startswith('.'.join(v_split))
                    else:
                        is_eq = (v_split == cv_split[:1])
        except:
            # Any launch/parsing failure means "not compatible".
            is_eq = False
        return is_eq

    @staticmethod
    def __find_similar_version(version: str) -> str:
        """Query the chromedriver download index for the newest published
        driver version matching the *version* prefix; None when no match."""
        url = 'https://chromedriver.storage.googleapis.com/'
        if version == 'LATEST_RELEASE':
            # The LATEST_RELEASE endpoint returns a bare version string.
            url += version
        sml_version = None
        try:
            version_resp = urlrequest.urlopen(url)
            htm = version_resp.read().decode('utf8')
            if version == 'LATEST_RELEASE':
                sml_version = htm
            else:
                # Scan the bucket index for keys like
                # "<Key>96.0.4664.45/chromedriver_win32.zip</Key>" and keep
                # the highest matching version.
                pat = rf'<Key>({version}[\d.]*)/chromedriver_[\w.]+.zip</Key>'
                result = re.findall(pat, htm)
                if result:
                    sml_version = max(result)
        finally:
            # NOTE(review): "finally: pass" does not swallow errors --
            # network failures propagate to the caller.
            pass
        return sml_version
| 37.26087 | 108 | 0.590265 |
94647f29288eed96bee212593a6bfcee89a4313f | 337 | py | Python | demo/gql/mutation.py | YanickJair/graphene-demo | 0b2050639605dd42762a015247bd277bb46814d7 | [
"Apache-2.0"
] | 2 | 2021-07-02T17:02:37.000Z | 2022-02-09T22:13:08.000Z | demo/gql/mutation.py | YanickJair/graphene-demo | 0b2050639605dd42762a015247bd277bb46814d7 | [
"Apache-2.0"
] | null | null | null | demo/gql/mutation.py | YanickJair/graphene-demo | 0b2050639605dd42762a015247bd277bb46814d7 | [
"Apache-2.0"
] | null | null | null | import graphene
import demo.gql.resolvers as resolvers
class Mutation(graphene.AbstractType):
    """Root GraphQL mutations, delegating to the resolver Mutation classes."""

    # Product catalogue mutations.
    create_prod_category = resolvers.CreateProductCategory.Field()
    create_product = resolvers.CreateProduct.Field()
    # User interaction mutations.
    manage_favorites = resolvers.ManageFavorites.Field()
    comment = resolvers.Comment.Field()
| 30.636364 | 66 | 0.738872 |
b3e2eca6edc6e541fe0eface8725e6c675416220 | 12,662 | py | Python | mozillians/users/migrations/0044_migration_multi_website.py | caktus/mozillians | 312eb5d993b60092fa4f8eb94548c1db4b21fa01 | [
"BSD-3-Clause"
] | null | null | null | mozillians/users/migrations/0044_migration_multi_website.py | caktus/mozillians | 312eb5d993b60092fa4f8eb94548c1db4b21fa01 | [
"BSD-3-Clause"
] | null | null | null | mozillians/users/migrations/0044_migration_multi_website.py | caktus/mozillians | 312eb5d993b60092fa4f8eb94548c1db4b21fa01 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
    """South data migration: move the single ``UserProfile.website`` field
    into ``ExternalAccount`` rows of type ``WEBSITE``, enabling multiple
    websites per profile."""

    def forwards(self, orm):
        """Create one WEBSITE ExternalAccount per profile with a non-empty
        website, carrying over the field's privacy level."""
        for profile in orm.UserProfile.objects.exclude(website=''):
            profile.externalaccount_set.create(type='WEBSITE',
                                               username=profile.website,
                                               privacy=profile.privacy_website)

    def backwards(self, orm):
        # This migration is necessarily lossy if users have created multiple Sites.
        for profile in orm.UserProfile.objects.all():
            sites = profile.externalaccount_set.filter(type='WEBSITE')
            if sites.exists():
                # Only the first site survives the rollback; the rest are
                # dropped by the delete() below.
                profile.website = sites[0].username
                profile.privacy_website = sites[0].privacy
                profile.save()
        orm.ExternalAccount.objects.filter(type='WEBSITE').delete()

    # Frozen ORM definitions captured by South when this migration was
    # generated; auto-generated -- do not edit by hand.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'groups.group': {
            'Meta': {'ordering': "['name']", 'object_name': 'Group'},
            'always_auto_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'auto_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'irc_channel': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '63', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
            'steward': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['users.UserProfile']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
            'system': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'url': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'blank': 'True'}),
            'website': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
            'wiki': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'blank': 'True'})
        },
        'groups.language': {
            'Meta': {'ordering': "['name']", 'object_name': 'Language'},
            'always_auto_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'auto_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
            'url': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'blank': 'True'})
        },
        'groups.skill': {
            'Meta': {'ordering': "['name']", 'object_name': 'Skill'},
            'always_auto_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'auto_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
            'url': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'blank': 'True'})
        },
        'users.externalaccount': {
            'Meta': {'ordering': "['type']", 'object_name': 'ExternalAccount'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'privacy': ('django.db.models.fields.PositiveIntegerField', [], {'default': '3'}),
            'type': ('django.db.models.fields.CharField', [], {}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['users.UserProfile']"}),
            'username': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'users.usernameblacklist': {
            'Meta': {'ordering': "['value']", 'object_name': 'UsernameBlacklist'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_regex': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'value': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'users.userprofile': {
            'Meta': {'ordering': "['full_name']", 'object_name': 'UserProfile', 'db_table': "'profile'"},
            'allows_community_sites': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'allows_mozilla_sites': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'basket_token': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
            'bio': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
            'city': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
            'country': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50'}),
            'date_mozillian': ('django.db.models.fields.DateField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'date_vouched': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'full_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'members'", 'blank': 'True', 'to': "orm['groups.Group']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'ircname': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '63', 'blank': 'True'}),
            'is_vouched': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'languages': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'members'", 'blank': 'True', 'to': "orm['groups.Language']"}),
            'last_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now': 'True', 'blank': 'True'}),
            'photo': ('sorl.thumbnail.fields.ImageField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
            'privacy_bio': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
            'privacy_city': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
            'privacy_country': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
            'privacy_date_mozillian': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
            'privacy_email': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
            'privacy_full_name': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
            'privacy_groups': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
            'privacy_ircname': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
            'privacy_languages': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
            'privacy_photo': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
            'privacy_region': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
            'privacy_skills': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
            'privacy_timezone': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
            'privacy_title': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
            'privacy_tshirt': ('mozillians.users.models.PrivacyField', [], {'default': '1'}),
            'privacy_vouched_by': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
            'privacy_website': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
            'region': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
            'skills': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'members'", 'blank': 'True', 'to': "orm['groups.Skill']"}),
            'timezone': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '70', 'blank': 'True'}),
            'tshirt': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'}),
            'vouched_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'vouchees'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['users.UserProfile']", 'blank': 'True', 'null': 'True'}),
            'website': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'blank': 'True'})
        }
    }

    complete_apps = ['users']
    symmetrical = True
| 82.220779 | 227 | 0.561365 |
2ec2e5d2573d1f2d63a8e01087f21737035b6c46 | 38,130 | py | Python | scripts/viz_src/svg.py | kadupitiya/RNN-MD | 9350ab209126983bff79f34b34e2f68f038e536c | [
"Apache-2.0"
] | 7 | 2020-05-19T02:24:37.000Z | 2021-05-27T11:01:24.000Z | scripts/viz_src/svg.py | kadupitiya/RNN-MD | 9350ab209126983bff79f34b34e2f68f038e536c | [
"Apache-2.0"
] | 1 | 2021-02-13T01:12:09.000Z | 2021-02-13T01:12:09.000Z | scripts/viz_src/svg.py | kadupitiya/RNN-MD | 9350ab209126983bff79f34b34e2f68f038e536c | [
"Apache-2.0"
] | 4 | 2020-05-20T20:50:35.000Z | 2022-01-11T08:20:04.000Z | # Pizza.py toolkit, www.cs.sandia.gov/~sjplimp/pizza.html
# Steve Plimpton, sjplimp@sandia.gov, Sandia National Laboratories
#
# Copyright (2005) Sandia Corporation. Under the terms of Contract
# DE-AC04-94AL85000 with Sandia Corporation, the U.S. Government retains
# certain rights in this software. This software is distributed under
# the GNU General Public License.
# svg tool
from math import sqrt, atan, cos, sin, fabs
import re
import subprocess
import os
import sys
from viz_src.vizinfo import vizinfo
from viz_src.vizinfo import colors
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
oneline = "3d visualization via SVG files"
docstr = """
s = svg(d) create SVG object for data in d
d = atom snapshot object (dump, data)
s.bg("black") set background color (def = "black")
s.size(N) set image size to NxN
s.size(N,M) set image size to NxM
s.rotate(60,135) view from z theta and azimuthal phi (def = 60,30)
s.shift(x,y) translate by x,y pixels in view window (def = 0,0)
s.zoom(0.5) scale image by factor (def = 1)
s.box(0/1/2) 0/1/2 = none/variable/fixed box
s.box(0/1/2,"green") set box color
s.box(0/1/2,"red",4) set box edge thickness
s.file = "image" file prefix for created images (def = "image")
s.show(N) show image of snapshot at timestep N
s.all() make images of all selected snapshots
s.all(P) images of all, start file label at P
s.all(N,M,P) make M images of snapshot N, start label at P
s.pan(60,135,1.0,40,135,1.5) pan during all() operation
s.pan() no pan during all() (default)
args = z theta, azimuthal phi, zoom factor at beginning and end
values at each step are interpolated between beginning and end values
s.select = "$x > %g*3.0" string to pass to d.aselect.test() during all()
s.select = "" no extra aselect (default)
%g varies from 0.0 to 1.0 from beginning to end of all()
s.label(x,y,"h",size,"red","This is a label") add label to each image
s.nolabel() delete all labels
x,y coords = -0.5 to 0.5, "h" or "t" for Helvetica or Times font
size = fontsize (e.g. 10), "red" = color of text
s.acol(2,"green") set atom colors by atom type (1-N)
s.acol([2,4],["red","blue"]) 1st arg = one type or list of types
s.acol(0,"blue") 2nd arg = one color or list of colors
s.acol(range(20),["red","blue"]) if list lengths unequal, interpolate
s.acol(range(10),"loop") assign colors in loop, randomly ordered
if 1st arg is 0, set all types to 2nd arg
if list of types has a 0 (e.g. range(10)), +1 is added to each value
interpolate means colors blend smoothly from one value to the next
s.arad([1,2],[0.5,0.3]) set atom radii, same rules as acol()
s.bcol() set bond color, same args as acol()
s.brad() set bond thickness, same args as arad()
s.tcol() set triangle color, same args as acol()
s.tfill() set triangle fill, 0 fill, 1 line, 2 both
s.lcol() set line color, same args as acol()
s.lrad() set line thickness, same args as arad()
s.adef() set atom/bond/tri/line properties to default
s.bdef() default = "loop" for colors, 0.45 for radii
s.tdef() default = 0.25 for bond/line thickness
s.ldef() default = 0 fill
by default 100 types are assigned
if atom/bond/tri/line has type > # defined properties, is an error
from vizinfo import colors access color list
print colors list defined color names and RGB values
colors["nickname"] = [R,G,B] set new RGB values from 0 to 255
140 pre-defined colors: red, green, blue, purple, yellow, black, white, etc
Settings specific to svg tool:
s.thick = 2.0 pixel thickness of black atom border
"""
# History
# 8/05, Matt Jones (BYU): original version
# 9/05, Steve Plimpton: adjusted box and label attributes
# ToDo list
# when do aselect with select str while looping N times on same timestep
# would not let you grow # of atoms selected
# triangles are not drawn with fill type
# Variables
# ztheta = vertical angle from z-azis to view from
# azphi = azimuthal angle to view from
# xshift,yshift = xy translation of scene (in pixels)
# distance = size of simulation box (largest dim)
# file = filename prefix to use for images produced
# boxflag = 0/1/2 for drawing simulation box: none/variable/fixed
# bxcol = color of box
# bxthick = thickness of box
# bgcol = color of background
# vizinfo = scene attributes
# Imports and external programs
# Optional site-local override for the external display program.
# Catch only ImportError: the previous `except BaseException` also swallowed
# KeyboardInterrupt/SystemExit raised during the import, hiding real failures.
try:
    from DEFAULTS import PIZZA_DISPLAY
except ImportError:
    PIZZA_DISPLAY = "display"  # fall back to ImageMagick's `display`
# Class definition
class svg:
    """Render dump snapshots as 2-d projected SVG images.

    View state (rotation angles, zoom, pixel shifts, colors, text labels) is
    held on the instance; per-type atom/bond/tri/line appearance comes from a
    ``vizinfo`` table.  ``show()`` renders one snapshot to an SVG string,
    ``all()`` writes one numbered .svg file per selected snapshot.
    """

    # --------------------------------------------------------------------
    def __init__(self, data):
        # data = snapshot source providing viz()/findtime()/iterator()/maxbox()
        self.data = data
        self.xpixels = 512
        self.ypixels = 512
        self.ztheta = 60                  # view angle from z-axis (degrees)
        self.azphi = 30                   # azimuthal view angle (degrees)
        self.scale = 1.0                  # zoom factor
        self.xshift = self.yshift = 0     # xy pan of scene, in pixels
        self.file = "image"               # filename prefix for created images
        self.boxflag = 0                  # 0/1/2 = no box / per-snapshot box / fixed max box
        self.bxcol = [1, 1, 0]            # box color as RGB fractions
        self.bxthick = 0.3                # box edge thickness
        self.bgcol = [0, 0, 0]            # background color as RGB fractions
        self.labels = []                  # text labels added via label()
        self.panflag = 0                  # 1 while a pan() trajectory is active
        self.select = ""                  # extra aselect test string for all()
        self.thick = 1.0                  # svg-specific: atom border thickness
        self.vizinfo = vizinfo()
        # install default colors/radii/fills for 100 types of each object kind
        self.adef()
        self.bdef()
        self.tdef()
        self.ldef()

    # --------------------------------------------------------------------
    def bg(self, color):
        # set background color by name; stored as RGB fractions in [0, 1]
        #print(colors)
        self.bgcol = [colors[color][0] / 255.0, colors[color][1] / 255.0,
                      colors[color][2] / 255.0]

    # --------------------------------------------------------------------
    def size(self, newx, newy=None):
        # set image size in pixels; square image if only one dim is given
        self.xpixels = newx
        if not newy:
            self.ypixels = self.xpixels
        else:
            self.ypixels = newy

    # --------------------------------------------------------------------
    def rotate(self, ztheta, azphi):
        # set viewing angles (degrees)
        self.ztheta = ztheta
        self.azphi = azphi

    # --------------------------------------------------------------------
    def shift(self, x, y):
        # translate the scene by (x, y) pixels
        self.xshift = x
        self.yshift = y

    # --------------------------------------------------------------------
    def box(self, *args):
        # box(flag[, colorname[, thickness]]) - configure simulation-box drawing
        self.boxflag = args[0]
        if len(args) > 1:
            self.bxcol = [colors[args[1]][0] / 255.0,
                          colors[args[1]][1] / 255.0,
                          colors[args[1]][2] / 255.0]
        if len(args) > 2:
            self.bxthick = args[2]

    # --------------------------------------------------------------------
    def zoom(self, factor):
        self.scale = factor

    # --------------------------------------------------------------------
    def show(self, ntime):
        # render the snapshot at timestep ntime and return the SVG text
        data = self.data
        which = data.findtime(ntime)
        time, box, atoms, bonds, tris, lines = data.viz(which)
        if self.boxflag == 2:
            box = data.maxbox()
        self.distance = compute_distance(box)
        return self.single_as_string(self.file, box, atoms, bonds, tris, lines, 1)
        #cmd = "%s %s.svg" % (PIZZA_DISPLAY, self.file)
        #subprocess.getoutput(cmd)

    # --------------------------------------------------------------------
    def pan(self, *list):
        # pan(z1,phi1,zoom1,z2,phi2,zoom2) interpolates view during all();
        # pan() with no args disables panning
        if len(list) == 0:
            self.panflag = 0
        else:
            self.panflag = 1
            self.ztheta_start = list[0]
            self.azphi_start = list[1]
            self.scale_start = list[2]
            self.ztheta_stop = list[3]
            self.azphi_stop = list[4]
            self.scale_stop = list[5]

    # --------------------------------------------------------------------
    def all(self, *list):
        # all() / all(P) / all(N, M, P): write images for every selected
        # snapshot, or M images of snapshot N, numbering files from P.
        # NOTE(review): the parameter shadows the builtin `list`.
        data = self.data
        if len(list) == 0:
            nstart = 0
            ncount = data.nselect
        elif len(list) == 1:
            nstart = list[0]
            ncount = data.nselect
        else:
            ntime = list[0]
            nstart = list[2]
            ncount = list[1]
        if self.boxflag == 2:
            box = data.maxbox()
        # loop over all selected steps
        # distance from 1st snapshot box or max box for all selected steps
        # call single() w/ scaling on 1st step or if panning
        if len(list) <= 1:
            n = nstart
            i = flag = 0
            while 1:
                which, time, flag = data.iterator(flag)
                if flag == -1:
                    break
                fraction = float(i) / (ncount - 1)
                if self.select != "":
                    newstr = self.select % fraction
                    data.aselect.test(newstr, time)
                time, boxone, atoms, bonds, tris, lines = data.viz(which)
                if self.boxflag < 2:
                    box = boxone
                if n == nstart:
                    self.distance = compute_distance(box)
                # zero-pad the frame number to 4 digits in the filename
                if n < 10:
                    file = self.file + "000" + str(n)
                elif n < 100:
                    file = self.file + "00" + str(n)
                elif n < 1000:
                    file = self.file + "0" + str(n)
                else:
                    file = self.file + str(n)
                if self.panflag:
                    # linear interpolation of view angles/zoom along the pan
                    self.ztheta = self.ztheta_start + \
                        fraction * (self.ztheta_stop - self.ztheta_start)
                    self.azphi = self.azphi_start + \
                        fraction * (self.azphi_stop - self.azphi_start)
                    self.scale = self.scale_start + \
                        fraction * (self.scale_stop - self.scale_start)
                scaleflag = 0
                if n == nstart or self.panflag:
                    scaleflag = 1
                self.single(file, box, atoms, bonds, tris, lines, scaleflag)
                print(time, end=' ')
                sys.stdout.flush()
                i += 1
                n += 1
        # loop ncount times on same step
        # distance from 1st snapshot box or max box for all selected steps
        # call single() w/ scaling on 1st step or if panning
        else:
            which = data.findtime(ntime)
            n = nstart
            for i in range(ncount):
                fraction = float(i) / (ncount - 1)
                if self.select != "":
                    newstr = self.select % fraction
                    data.aselect.test(newstr, ntime)
                time, boxone, atoms, bonds, tris, lines = data.viz(which)
                if self.boxflag < 2:
                    box = boxone
                if n == nstart:
                    self.distance = compute_distance(box)
                if n < 10:
                    file = self.file + "000" + str(n)
                elif n < 100:
                    file = self.file + "00" + str(n)
                elif n < 1000:
                    file = self.file + "0" + str(n)
                else:
                    file = self.file + str(n)
                if self.panflag:
                    self.ztheta = self.ztheta_start + \
                        fraction * (self.ztheta_stop - self.ztheta_start)
                    self.azphi = self.azphi_start + \
                        fraction * (self.azphi_stop - self.azphi_start)
                    self.scale = self.scale_start + \
                        fraction * (self.scale_stop - self.scale_start)
                scaleflag = 0
                if n == nstart or self.panflag:
                    scaleflag = 1
                self.single(file, box, atoms, bonds, tris, lines, scaleflag)
                print(n, end=' ')
                sys.stdout.flush()
                n += 1
        print("\n%d images" % ncount)

    # --------------------------------------------------------------------
    def label(self, x, y, font, point, color, text):
        # add a text label; x,y in [-0.5, 0.5] image fractions,
        # font = "h"/"t" key into fontlist, point = font size
        from vizinfo import colors
        scaledcolor = [colors[color][0] / 255.0, colors[color][1] / 255.0,
                       colors[color][2] / 255.0]
        list = [x, y, fontlist[font], point, scaledcolor, text]
        self.labels.append(list)

    # --------------------------------------------------------------------
    def nolabel(self):
        # delete all labels
        self.labels = []

    # --------------------------------------------------------------------
    def single(self, file, box, atoms, bonds, tris, lines, scaleflag):
        # render one snapshot and write it to <file>.svg
        # NOTE(review): near-duplicate of single_as_string(); only the output
        # target differs - candidates for sharing one rendering helper.
        matrix = rotation_matrix('x', -self.ztheta, 'z', 270.0 - self.azphi)
        if scaleflag:
            # recompute pixel scale factor and projected scene center
            self.factor = self.xpixels * self.scale / (1.6 * self.distance)
            xctr = 0.5 * (box[0] + box[3])
            yctr = 0.5 * (box[1] + box[4])
            zctr = 0.5 * (box[2] + box[5])
            self.offsetx = matrix[0] * xctr + matrix[3] * yctr + matrix[6] * zctr
            self.offsety = matrix[1] * xctr + matrix[4] * yctr + matrix[7] * zctr
        olist = []
        for atom in atoms:
            atom[0] = 0
            newatom = self.transform(atom, matrix)
            olist.append(newatom)
        for tri in tris:
            tri[0] = 1
            newtri = self.transform(tri, matrix)
            olist.append(newtri)
        bound = 0.25 * self.distance
        for bond in bonds:
            newbond = [2, bond[1]]
            # shorten the bond by each endpoint atom's radius:
            # (r/r - rad/r) == (r - rad)/r
            dx = bond[5] - bond[2]
            dy = bond[6] - bond[3]
            dz = bond[7] - bond[4]
            r = sqrt(dx * dx + dy * dy + dz * dz)
            if not r:
                r = 1
            rad = self.vizinfo.arad[int(bond[9])]
            newbond.append(bond[2] + (r / r - rad / r) * dx)
            newbond.append(bond[3] + (r / r - rad / r) * dy)
            newbond.append(bond[4] + (r / r - rad / r) * dz)
            # cut off second side of bond
            dx = bond[2] - bond[5]
            dy = bond[3] - bond[6]
            dz = bond[4] - bond[7]
            r = sqrt(dx * dx + dy * dy + dz * dz)
            if not r:
                r = 1
            rad = self.vizinfo.arad[int(bond[8])]
            newbond.append(bond[5] + (r / r - rad / r) * dx)
            newbond.append(bond[6] + (r / r - rad / r) * dy)
            newbond.append(bond[7] + (r / r - rad / r) * dz)
            # skip very long bonds - presumably periodic-wrap artifacts
            if fabs(newbond[2] - newbond[5]) > bound or \
               fabs(newbond[3] - newbond[6]) > bound:
                continue
            newbond = self.transform(newbond, matrix)
            if newbond[4] < newbond[7]:
                newbond[4] = newbond[7]
            olist.append(newbond)
        for line in lines:
            line[0] = 3
            newline = self.transform(line, matrix)
            olist.append(newline)
        if self.boxflag:
            # 12 edges of the simulation box, drawn as type-4 lines
            x1, y1, z1 = box[0], box[1], box[2]
            x2, y2, z2 = box[3], box[4], box[5]
            blines = []
            blines.append([4, 0, x1, y1, z1, x1, y1, z2])
            blines.append([4, 0, x2, y1, z1, x2, y1, z2])
            blines.append([4, 0, x2, y2, z1, x2, y2, z2])
            blines.append([4, 0, x1, y2, z1, x1, y2, z2])
            blines.append([4, 0, x1, y1, z1, x2, y1, z1])
            blines.append([4, 0, x1, y2, z1, x2, y2, z1])
            blines.append([4, 0, x1, y2, z2, x2, y2, z2])
            blines.append([4, 0, x1, y1, z2, x2, y1, z2])
            blines.append([4, 0, x1, y1, z1, x1, y2, z1])
            blines.append([4, 0, x2, y1, z1, x2, y2, z1])
            blines.append([4, 0, x2, y1, z2, x2, y2, z2])
            blines.append([4, 0, x1, y1, z2, x1, y2, z2])
            for line in blines:
                newline = self.transform(line, matrix)
                olist.append(newline)
        # convert objects by factor/offset and sort by z-depth
        self.convert(olist)
        #olist.sort(key=cmprz)
        olist.sort(key=lambda x: x[4])
        # write SVG file
        file += ".svg"
        f = open(file, "w")
        header = '<?xml version="1.0" encoding="UTF-8"?>'
        header += '<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"\
 "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">'
        header += '<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" height="%s" width="%s" >' % \
            (self.ypixels, self.xpixels)
        header += '<g style="fill-opacity:1.0; stroke:black; stroke-width:0.001;">'
        print(header, file=f)
        color = '<rect x="0" y="0" height="%s" width="%s" ' % \
            (self.ypixels, self.xpixels)
        color += 'fill="rgb(%s,%s,%s)"/>' % \
            (self.bgcol[0] * 255, self.bgcol[1] * 255, self.bgcol[2] * 255)
        print(color, file=f)
        for element in olist:
            self.write(f, 0, element)
        for label in self.labels:
            self.write(f, 1, label)
        footer = "</g></svg>"
        print(footer, file=f)
        f.close()

    def single_as_string(self, file, box, atoms, bonds, tris, lines, scaleflag):
        # render one snapshot and return the SVG document as a string
        # (the `file` argument is kept for signature symmetry; the name built
        # from it is never used).  NOTE(review): duplicates single() except
        # for writing to a StringIO instead of a file.
        matrix = rotation_matrix('x', -self.ztheta, 'z', 270.0 - self.azphi)
        if scaleflag:
            self.factor = self.xpixels * self.scale / (1.6 * self.distance)
            xctr = 0.5 * (box[0] + box[3])
            yctr = 0.5 * (box[1] + box[4])
            zctr = 0.5 * (box[2] + box[5])
            self.offsetx = matrix[0] * xctr + matrix[3] * yctr + matrix[6] * zctr
            self.offsety = matrix[1] * xctr + matrix[4] * yctr + matrix[7] * zctr
        olist = []
        for atom in atoms:
            atom[0] = 0
            newatom = self.transform(atom, matrix)
            olist.append(newatom)
        for tri in tris:
            tri[0] = 1
            newtri = self.transform(tri, matrix)
            olist.append(newtri)
        bound = 0.25 * self.distance
        for bond in bonds:
            newbond = [2, bond[1]]
            dx = bond[5] - bond[2]
            dy = bond[6] - bond[3]
            dz = bond[7] - bond[4]
            r = sqrt(dx * dx + dy * dy + dz * dz)
            if not r:
                r = 1
            rad = self.vizinfo.arad[int(bond[9])]
            newbond.append(bond[2] + (r / r - rad / r) * dx)
            newbond.append(bond[3] + (r / r - rad / r) * dy)
            newbond.append(bond[4] + (r / r - rad / r) * dz)
            # cut off second side of bond
            dx = bond[2] - bond[5]
            dy = bond[3] - bond[6]
            dz = bond[4] - bond[7]
            r = sqrt(dx * dx + dy * dy + dz * dz)
            if not r:
                r = 1
            rad = self.vizinfo.arad[int(bond[8])]
            newbond.append(bond[5] + (r / r - rad / r) * dx)
            newbond.append(bond[6] + (r / r - rad / r) * dy)
            newbond.append(bond[7] + (r / r - rad / r) * dz)
            if fabs(newbond[2] - newbond[5]) > bound or \
               fabs(newbond[3] - newbond[6]) > bound:
                continue
            newbond = self.transform(newbond, matrix)
            if newbond[4] < newbond[7]:
                newbond[4] = newbond[7]
            olist.append(newbond)
        for line in lines:
            line[0] = 3
            newline = self.transform(line, matrix)
            olist.append(newline)
        if self.boxflag:
            x1, y1, z1 = box[0], box[1], box[2]
            x2, y2, z2 = box[3], box[4], box[5]
            blines = []
            blines.append([4, 0, x1, y1, z1, x1, y1, z2])
            blines.append([4, 0, x2, y1, z1, x2, y1, z2])
            blines.append([4, 0, x2, y2, z1, x2, y2, z2])
            blines.append([4, 0, x1, y2, z1, x1, y2, z2])
            blines.append([4, 0, x1, y1, z1, x2, y1, z1])
            blines.append([4, 0, x1, y2, z1, x2, y2, z1])
            blines.append([4, 0, x1, y2, z2, x2, y2, z2])
            blines.append([4, 0, x1, y1, z2, x2, y1, z2])
            blines.append([4, 0, x1, y1, z1, x1, y2, z1])
            blines.append([4, 0, x2, y1, z1, x2, y2, z1])
            blines.append([4, 0, x2, y1, z2, x2, y2, z2])
            blines.append([4, 0, x1, y1, z2, x1, y2, z2])
            for line in blines:
                newline = self.transform(line, matrix)
                olist.append(newline)
        # convert objects by factor/offset and sort by z-depth
        self.convert(olist)
        #olist.sort(key=cmprz)
        olist.sort(key=lambda x: x[4])
        # write SVG file
        file += ".svg"
        #f = open(file, "w")
        f = StringIO()
        header = '<?xml version="1.0" encoding="UTF-8"?>'
        header += '<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"\
 "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">'
        header += '<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" height="%s" width="%s" >' % \
            (self.ypixels, self.xpixels)
        header += '<g style="fill-opacity:1.0; stroke:black; stroke-width:0.001;">'
        print(header, file=f)
        color = '<rect x="0" y="0" height="%s" width="%s" ' % \
            (self.ypixels, self.xpixels)
        color += 'fill="rgb(%s,%s,%s)"/>' % \
            (self.bgcol[0] * 255, self.bgcol[1] * 255, self.bgcol[2] * 255)
        print(color, file=f)
        for element in olist:
            self.write(f, 0, element)
        for label in self.labels:
            self.write(f, 1, label)
        footer = "</g></svg>"
        print(footer, file=f)
        content_svg = f.getvalue()
        #print(content_svg)
        f.close()
        return content_svg

    # --------------------------------------------------------------------
    # rotate with matrix
    def transform(self, obj, matrix):
        # apply the 3x3 rotation (row-major 9-tuple) to each xyz point of the
        # object; obj[0] selects the layout: 0=atom, 1=triangle, else bond/line
        onew = obj[0:2]
        if obj[0] == 0:  # transform atom
            onew.append(matrix[0] * obj[2] + matrix[3] * obj[3] + matrix[6] * obj[4])
            onew.append(matrix[1] * obj[2] + matrix[4] * obj[3] + matrix[7] * obj[4])
            onew.append(matrix[2] * obj[2] + matrix[5] * obj[3] + matrix[8] * obj[4])
        elif obj[0] == 1:  # transform triangle
            onew.append(matrix[0] * obj[2] + matrix[3] * obj[3] + matrix[6] * obj[4])
            onew.append(matrix[1] * obj[2] + matrix[4] * obj[3] + matrix[7] * obj[4])
            onew.append(matrix[2] * obj[2] + matrix[5] * obj[3] + matrix[8] * obj[4])
            onew.append(matrix[0] * obj[5] + matrix[3] * obj[6] + matrix[6] * obj[7])
            onew.append(matrix[1] * obj[5] + matrix[4] * obj[6] + matrix[7] * obj[7])
            onew.append(matrix[2] * obj[5] + matrix[5] * obj[6] + matrix[8] * obj[7])
            onew.append(matrix[0] * obj[8] + matrix[3] * obj[9] + matrix[6] * obj[10])
            onew.append(matrix[1] * obj[8] + matrix[4] * obj[9] + matrix[7] * obj[10])
            onew.append(matrix[2] * obj[8] + matrix[5] * obj[9] + matrix[8] * obj[10])
        else:  # transform bond or line
            onew.append(matrix[0] * obj[2] + matrix[3] * obj[3] + matrix[6] * obj[4])
            onew.append(matrix[1] * obj[2] + matrix[4] * obj[3] + matrix[7] * obj[4])
            onew.append(matrix[2] * obj[2] + matrix[5] * obj[3] + matrix[8] * obj[4])
            onew.append(matrix[0] * obj[5] + matrix[3] * obj[6] + matrix[6] * obj[7])
            onew.append(matrix[1] * obj[5] + matrix[4] * obj[6] + matrix[7] * obj[7])
            onew.append(matrix[2] * obj[5] + matrix[5] * obj[6] + matrix[8] * obj[7])
        return onew

    # --------------------------------------------------------------------
    def convert(self, objlist):
        # map rotated scene coordinates to pixel coordinates in place
        # (y axis is flipped: SVG y grows downward)
        factor = self.factor
        offsetx = self.offsetx
        offsety = self.offsety
        xctr = 0.5 * self.xpixels + self.xshift
        yctr = 0.5 * self.ypixels - self.yshift
        for obj in objlist:
            if obj[0] == 0:  # convert atom
                obj[2] = factor * (obj[2] - offsetx) + xctr
                obj[3] = yctr - factor * (obj[3] - offsety)
            elif obj[0] == 1:  # convert triangle
                obj[2] = factor * (obj[2] - offsetx) + xctr
                obj[3] = yctr - factor * (obj[3] - offsety)
                obj[5] = factor * (obj[5] - offsetx) + xctr
                obj[6] = yctr - factor * (obj[6] - offsety)
                obj[8] = factor * (obj[8] - offsetx) + xctr
                obj[9] = yctr - factor * (obj[9] - offsety)
            else:  # convert bond or line
                obj[2] = factor * (obj[2] - offsetx) + xctr
                obj[3] = yctr - factor * (obj[3] - offsety)
                obj[5] = factor * (obj[5] - offsetx) + xctr
                obj[6] = yctr - factor * (obj[6] - offsety)

    # --------------------------------------------------------------------
    def write(self, f, flag, *args):
        # emit one SVG element to stream f: flag 0 = scene object (args[0] is
        # a converted atom/tri/bond/line/box-line), flag 1 = text label
        if len(args):
            obj = args[0]
        if flag == 0:
            if obj[0] == 0:  # atom with its color and radius
                itype = int(obj[1])
                if itype > self.vizinfo.nacolor:
                    raise Exception("atom type too big")
                color = self.vizinfo.acolor[itype]
                rad = self.vizinfo.arad[itype]
                print(
                    '<circle cx="%s" cy="%s" r="%s" fill="rgb(%s,%s,%s)" stroke-width="%s" />' %
                    (obj[2], obj[3], rad * self.factor,
                     color[0] * 255, color[1] * 255, color[2] * 255,
                     self.thick),
                    file=f)
            elif obj[0] == 1:  # tri with its color (need to add fill type)
                itype = int(obj[1])
                if itype > self.vizinfo.ntcolor:
                    raise Exception("tri type too big")
                color = self.vizinfo.tcolor[itype]
                print(
                    '<polygon points= "%s,%s %s,%s %s,%s" fill="rgb(%s,%s,%s)" stroke="black" stroke-width="0.01" />' %
                    (obj[2], obj[3], obj[5], obj[6], obj[8], obj[9],
                     color[0] * 255, color[1] * 255, color[2] * 255),
                    file=f)
            elif obj[0] == 2:  # bond with its color and thickness
                itype = int(obj[1])
                if itype > self.vizinfo.nbcolor:
                    raise Exception("bond type too big")
                color = self.vizinfo.bcolor[itype]
                thick = self.vizinfo.brad[itype]
                print(
                    '<line x1="%s" y1="%s" x2="%s" y2="%s" stroke="rgb(%s,%s,%s)" stroke-width="%s" />' %
                    (obj[2], obj[3], obj[5], obj[6],
                     color[0] * 255, color[1] * 255, color[2] * 255,
                     thick * self.factor),
                    file=f)
            elif obj[0] == 3:  # line with its color and thickness
                itype = int(obj[1])
                if itype > self.vizinfo.nlcolor:
                    raise Exception("line type too big")
                color = self.vizinfo.lcolor[itype]
                thick = self.vizinfo.lrad[itype]
                print(
                    '<line x1="%s" y1="%s" x2="%s" y2="%s" stroke="rgb(%s,%s,%s)" stroke-width="%s" />' %
                    (obj[2], obj[3], obj[5], obj[6],
                     color[0] * 255, color[1] * 255, color[2] * 255,
                     thick * self.factor),
                    file=f)
            elif obj[0] == 4:  # box line with built-in color and thickness
                color = self.bxcol
                thick = self.bxthick
                print(
                    '<line x1="%s" y1="%s" x2="%s" y2="%s" stroke="rgb(%s,%s,%s)" stroke-width="%s" />' %
                    (obj[2], obj[3], obj[5], obj[6],
                     color[0] * 255, color[1] * 255, color[2] * 255,
                     thick * self.factor),
                    file=f)
        elif flag == 1:
            # label: obj = [xfrac, yfrac, fontname, pointsize, rgb, text]
            x = (obj[0] * self.xpixels) + (self.xpixels / 2.0)
            y = (self.ypixels / 2.0) - (obj[1] * self.ypixels)
            color = obj[4]
            # NOTE(review): the fill attribute below is malformed SVG - the
            # format string reads fill="rgb(%s,%s,%s"> and is missing the
            # closing ')' before the quote.
            print(
                '<text x="%s" y="%s" font-size="%s" font-family="%s" stroke="rgb(%s,%s,%s)" fill="rgb(%s,%s,%s"> %s </text>' %
                (x, y, obj[3], obj[2],
                 color[0] * 255, color[1] * 255, color[2] * 255,
                 color[0] * 255, color[1] * 255, color[2] * 255,
                 obj[5]),
                file=f)

    # --------------------------------------------------------------------
    def adef(self):
        # default atom colors/radii for 100 types
        self.vizinfo.setcolors("atom", list(range(100)), "loop")
        self.vizinfo.setradii("atom", list(range(100)), 0.45)

    # --------------------------------------------------------------------
    def bdef(self):
        # default bond colors/thickness for 100 types
        self.vizinfo.setcolors("bond", list(range(100)), "loop")
        self.vizinfo.setradii("bond", list(range(100)), 0.25)

    # --------------------------------------------------------------------
    def tdef(self):
        # default triangle colors/fill for 100 types
        self.vizinfo.setcolors("tri", list(range(100)), "loop")
        self.vizinfo.setfills("tri", list(range(100)), 0)

    # --------------------------------------------------------------------
    def ldef(self):
        # default line colors/thickness for 100 types
        self.vizinfo.setcolors("line", list(range(100)), "loop")
        self.vizinfo.setradii("line", list(range(100)), 0.25)

    # --------------------------------------------------------------------
    def acol(self, atypes, colors):
        self.vizinfo.setcolors("atom", atypes, colors)

    # --------------------------------------------------------------------
    def arad(self, atypes, radii):
        self.vizinfo.setradii("atom", atypes, radii)

    # --------------------------------------------------------------------
    def bcol(self, btypes, colors):
        self.vizinfo.setcolors("bond", btypes, colors)

    # --------------------------------------------------------------------
    def brad(self, btypes, radii):
        self.vizinfo.setradii("bond", btypes, radii)

    # --------------------------------------------------------------------
    def tcol(self, ttypes, colors):
        self.vizinfo.setcolors("tri", ttypes, colors)

    # --------------------------------------------------------------------
    def tfill(self, ttypes, flags):
        self.vizinfo.setfills("tri", ttypes, flags)

    # --------------------------------------------------------------------
    def lcol(self, ltypes, colors):
        self.vizinfo.setcolors("line", ltypes, colors)

    # --------------------------------------------------------------------
    def lrad(self, ltypes, radii):
        self.vizinfo.setradii("line", ltypes, radii)
# --------------------------------------------------------------------
# compare function for the sort method, orders according to z coordinate
def cmprz(a, b):
    """Legacy cmp-style comparator: order two scene objects by z-depth.

    Compares element 4 (the z coordinate after rotation) and returns
    1 / -1 / 0 for greater / less / equal.
    """
    za = a[4]
    zb = b[4]
    if za > zb:
        return 1
    if za < zb:
        return -1
    if za == zb:
        return 0
# --------------------------------------------------------------------
# return characteristic distance of simulation domain = max dimension
def compute_distance(box):
    """Return the characteristic size of the simulation domain.

    box = (xlo, ylo, zlo, xhi, yhi, zhi); the result is the largest of
    the three edge lengths.
    """
    return max(box[3] - box[0], box[4] - box[1], box[5] - box[2])
# --------------------------------------------------------------------
# compute 3x3 rotation matrix for viewing angle
# initially the scene is viewed:
# (1) along the z-axis, looking towards the origin from (0,0,1)
# (2) seeing xy plane, with +y up and +x to the right
# 1st rotation angle rotates the body
# 2nd rotation angle rotates the new body around the new rotated axis
# sign of rotation angles follow right-hand rule
# rotation_matrix(x/y/z,angle1,x/y/z,angle2)
# x/y/z = 1st axis to rotate around
# angle1 = 1st angle to rotate by
# x/y/z = 2nd axis to rotate around
# angle2 = 2nd angle to rotate by
# returns the rotation matrix as a string for now
def rotation_matrix(coord1, angle1, coord2, angle2):
    """Build the 3x3 view-rotation matrix for two successive rotations.

    coord1/coord2 are 'x', 'y', or 'z' axes; angle1/angle2 are in degrees
    (right-hand rule).  The second rotation is applied after the first,
    i.e. the result is c = b * a.  Returns the matrix as a flat row-major
    9-tuple (c11, c12, c13, c21, c22, c23, c31, c32, c33).

    BUGFIX: the 'z' branch of the first rotation previously assigned
    `a12 = a22 = c1` and then overwrote a12 with s1, leaving a11 = 0;
    the diagonal term belongs in a11 (mirroring the 'x'/'y' branches).
    """
    # convert rotation angles to radians
    pi = 4.0 * atan(1.0)
    angle1 = angle1 / 180.0 * pi
    angle2 = angle2 / 180.0 * pi
    # sines/cosines of 2 angles
    c1 = cos(angle1)
    s1 = sin(angle1)
    c2 = cos(angle2)
    s2 = sin(angle2)
    # 1st rotation matrix
    a11 = a12 = a13 = a21 = a22 = a23 = a31 = a32 = a33 = 0.0
    if coord1 == 'x':
        a11 = 1.0
        a22 = a33 = c1
        a23 = s1
        a32 = -s1
    elif coord1 == 'y':
        a22 = 1.0
        a11 = a33 = c1
        a13 = s1
        a31 = -s1
    elif coord1 == 'z':
        a33 = 1.0
        a11 = a22 = c1    # was `a12 = a22 = c1`: a12 got clobbered, a11 stayed 0
        a12 = s1
        a21 = -s1
    # 2nd rotation matrix
    b11 = b12 = b13 = b21 = b22 = b23 = b31 = b32 = b33 = 0.0
    if coord2 == 'x':
        b11 = 1.0
        b22 = b33 = c2
        b23 = s2
        b32 = -s2
    elif coord2 == 'y':
        b22 = 1.0
        b11 = b33 = c2
        b13 = s2
        b31 = -s2
    elif coord2 == 'z':
        b33 = 1.0
        b11 = b22 = c2
        b12 = s2
        b21 = -s2
    # full matrix c = b*a
    c11 = b11 * a11 + b12 * a21 + b13 * a31
    c12 = b11 * a12 + b12 * a22 + b13 * a32
    c13 = b11 * a13 + b12 * a23 + b13 * a33
    c21 = b21 * a11 + b22 * a21 + b23 * a31
    c22 = b21 * a12 + b22 * a22 + b23 * a32
    c23 = b21 * a13 + b22 * a23 + b23 * a33
    c31 = b31 * a11 + b32 * a21 + b33 * a31
    c32 = b31 * a12 + b32 * a22 + b33 * a32
    c33 = b31 * a13 + b32 * a23 + b33 * a33
    # form rotation matrix
    matrix = (c11, c12, c13, c21, c22, c23, c31, c32, c33)
    return matrix
# --------------------------------------------------------------------
# fontlist
# Map single-letter label font codes to SVG font-family names.
fontlist = {
    "t": "Times",
    "h": "Helvetica",
}
| 34.075067 | 132 | 0.410018 |
d4d443428089207823c0ae9eb49fb12b69ec283b | 7,529 | py | Python | app/engine/models.py | dariothornhill/adaptive-engine | 209e7a552f8675cdd18f6c1352ede68f6fbf9e36 | [
"Apache-2.0"
] | 31 | 2017-12-08T08:03:27.000Z | 2021-09-15T07:37:38.000Z | app/engine/models.py | dariothornhill/adaptive-engine | 209e7a552f8675cdd18f6c1352ede68f6fbf9e36 | [
"Apache-2.0"
] | 12 | 2018-09-27T13:55:15.000Z | 2021-06-10T19:15:17.000Z | app/engine/models.py | dariothornhill/adaptive-engine | 209e7a552f8675cdd18f6c1352ede68f6fbf9e36 | [
"Apache-2.0"
] | 19 | 2018-10-26T07:12:20.000Z | 2022-02-10T12:51:27.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
def first_and_last_n_chars(s, n1=30, n2=30):
    """
    Utility function to display the first n1 and last n2 characters of a long
    string, joined by "..." (degrades gracefully when the string is shorter
    than n1 + n2 characters).
    :param s: string
    :return: string for display
    """
    head_len = min(len(s), n1)
    head = s[:head_len]
    tail_len = min(len(s) - head_len, n2)
    tail = s[-tail_len:] if tail_len > 0 else ''
    if head_len == len(s):
        # whole string fits in the head
        return head
    if head_len + tail_len == len(s):
        # head and tail together cover the string exactly - nothing elided
        return "{}{}".format(head, tail)
    return "{}...{}".format(head, tail)
class Collection(models.Model):
    """
    Collection consists of multiple activities
    """
    # external identifier used by clients to reference the collection
    collection_id = models.CharField(max_length=200, unique=True)
    name = models.CharField(max_length=200)
    # optional cap on problems served from this collection
    max_problems = models.PositiveIntegerField(null=True, blank=True)

    def __str__(self):
        return "Collection: {} ({})".format(self.collection_id, self.name)
class KnowledgeComponent(models.Model):
    """A knowledge component (KC / learning objective) tracked by the engine."""
    kc_id = models.CharField(max_length=200, unique=True)
    name = models.CharField(max_length=200)
    # prior mastery probability for new learners (optional)
    mastery_prior = models.FloatField(null=True, blank=True)

    def __str__(self):
        return "KC: {} ({})".format(self.kc_id, self.name)
class PrerequisiteRelation(models.Model):
    """Weighted edge stating that one KC is a prerequisite of another."""
    prerequisite = models.ForeignKey(
        KnowledgeComponent,
        on_delete=models.CASCADE,
        related_name="dependent_relation"
    )
    knowledge_component = models.ForeignKey(KnowledgeComponent, on_delete=models.CASCADE)
    # strength of the prerequisite relationship
    value = models.FloatField()

    def __str__(self):
        return "PrerequisiteRelation: {} (prereq) -> {} = {}".format(
            self.prerequisite.kc_id,
            self.knowledge_component.kc_id,
            self.value
        )
class Activity(models.Model):
    """
    Activity model
    """
    url = models.CharField(max_length=500, default='')
    name = models.CharField(max_length=200, default='')
    collections = models.ManyToManyField(Collection, blank=True)
    knowledge_components = models.ManyToManyField(KnowledgeComponent, blank=True)
    difficulty = models.FloatField(null=True, blank=True)
    tags = models.TextField(default='', blank=True)
    type = models.CharField(max_length=200, default='', blank=True)
    # whether to include as valid problem to recommend from adaptive engine
    include_adaptive = models.BooleanField(default=True)
    # order for non-adaptive problems
    nonadaptive_order = models.PositiveIntegerField(null=True, blank=True)
    # order for pre-adaptive problems
    preadaptive_order = models.PositiveIntegerField(null=True, blank=True)
    # prerequisite activities - used to designate activities that should be served before
    prerequisite_activities = models.ManyToManyField('self', blank=True, symmetrical=False)

    def __str__(self):
        # long URLs are abbreviated to first 40 + last 10 characters
        return "Activity: {} ({})".format(first_and_last_n_chars(self.url, 40, 10), self.name)
class EngineSettings(models.Model):
    """Tunable thresholds and recommendation weights for the adaptive engine."""
    name = models.CharField(max_length=200, default='')
    r_star = models.FloatField()  # Threshold for forgiving lower odds of mastering pre-requisite LOs.
    L_star = models.FloatField()  # Threshold logarithmic odds. If mastery logarithmic odds are >= than L_star, the LO is considered mastered
    W_p = models.FloatField()  # Importance of readiness in recommending the next item
    W_r = models.FloatField()  # Importance of demand in recommending the next item
    W_c = models.FloatField()  # Importance of continuity in recommending the next item
    W_d = models.FloatField()  # Importance of appropriate difficulty in recommending the next item

    def __str__(self):
        return "EngineSettings: {}".format(self.name if self.name else self.pk)
class ExperimentalGroup(models.Model):
    """A/B-test group that ties learners to a particular engine configuration."""
    name = models.CharField(max_length=200, default='')
    # relative sampling weight for assigning learners to this group
    weight = models.FloatField(default=0)
    engine_settings = models.ForeignKey(
        EngineSettings,
        on_delete=models.SET_NULL,
        blank=True,
        null=True
    )

    def __str__(self):
        return "Experimental Group {}".format(self.name if self.name else self.pk)
class Learner(models.Model):
    """
    User model for students
    """
    user_id = models.CharField(max_length=200, default='')
    # LTI tool-consumer instance GUID; together with user_id identifies a learner
    tool_consumer_instance_guid = models.CharField(max_length=200, default='')
    experimental_group = models.ForeignKey(
        ExperimentalGroup,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
    )

    class Meta:
        # a learner is unique per (user, LTI consumer instance) pair
        unique_together = (('user_id', 'tool_consumer_instance_guid'),)

    def __str__(self):
        return "Learner: {}/{}".format(
            self.user_id or "<user_id>",
            self.tool_consumer_instance_guid or "<tool_consumer_instance_guid>"
        )
class Score(models.Model):
    """
    Score resulting from a learner's attempt on an activity
    """
    learner = models.ForeignKey(Learner, on_delete=models.CASCADE)
    activity = models.ForeignKey(Activity, on_delete=models.CASCADE)
    # score value
    score = models.FloatField()
    # creation time
    timestamp = models.DateTimeField(null=True, auto_now_add=True)

    def __str__(self):
        return "Score: {} [{} - {}]".format(
            self.score, self.learner, self.activity)
class Transit(models.Model):
    """Float value attached to an (activity, knowledge component) pair."""
    activity = models.ForeignKey(Activity, on_delete=models.CASCADE)
    knowledge_component = models.ForeignKey(KnowledgeComponent, on_delete=models.CASCADE)
    value = models.FloatField()

    def __str__(self):
        return "Transit: {} [{} - {}]".format(
            self.value, self.activity, self.knowledge_component)
class Guess(models.Model):
    """Float value attached to an (activity, knowledge component) pair."""
    activity = models.ForeignKey(Activity, on_delete=models.CASCADE)
    knowledge_component = models.ForeignKey(KnowledgeComponent, on_delete=models.CASCADE)
    value = models.FloatField()

    def __str__(self):
        return "Guess: {} [{} - {}]".format(
            self.value, self.activity, self.knowledge_component)
class Slip(models.Model):
    """Float value attached to an (activity, knowledge component) pair."""
    activity = models.ForeignKey(Activity, on_delete=models.CASCADE)
    knowledge_component = models.ForeignKey(KnowledgeComponent, on_delete=models.CASCADE)
    value = models.FloatField()

    def __str__(self):
        return "Slip: {} [{} - {}]".format(
            self.value, self.activity, self.knowledge_component)
class Mastery(models.Model):
    """Per-learner mastery value for a knowledge component."""
    learner = models.ForeignKey(Learner, on_delete=models.CASCADE)
    knowledge_component = models.ForeignKey(KnowledgeComponent, on_delete=models.CASCADE)
    value = models.FloatField()

    def __str__(self):
        return "Mastery: {} [{} - {}]".format(
            self.value, self.learner, self.knowledge_component)
class Exposure(models.Model):
    """Per-learner integer exposure count for a knowledge component."""
    learner = models.ForeignKey(Learner, on_delete=models.CASCADE)
    knowledge_component = models.ForeignKey(KnowledgeComponent, on_delete=models.CASCADE)
    # note: integer-valued, unlike the float fields on the sibling models
    value = models.IntegerField()

    def __str__(self):
        return "Exposure: {} [{} - {}]".format(
            self.value, self.learner, self.knowledge_component)
class Confidence(models.Model):
    """Per-learner confidence value for a knowledge component."""
    learner = models.ForeignKey(Learner, on_delete=models.CASCADE)
    knowledge_component = models.ForeignKey(KnowledgeComponent, on_delete=models.CASCADE)
    value = models.FloatField()

    def __str__(self):
        return "Confidence: {} [{} - {}]".format(
            self.value, self.learner, self.knowledge_component)
| 35.182243 | 141 | 0.687874 |
0b0268ac9abdd7d0d003a3ac00a205e70f45849f | 374 | py | Python | old/diversity_over_time_greed_test.py | Jontahan/numedal | e71ff6551926e37ce7b83c46fc9fa3504774f2d0 | [
"MIT"
] | null | null | null | old/diversity_over_time_greed_test.py | Jontahan/numedal | e71ff6551926e37ce7b83c46fc9fa3504774f2d0 | [
"MIT"
] | null | null | null | old/diversity_over_time_greed_test.py | Jontahan/numedal | e71ff6551926e37ce7b83c46fc9fa3504774f2d0 | [
"MIT"
] | null | null | null | from metrics.dmetric_over_time_greed import *
from util.experiment import Experiment
# Build five 4x4 gridworld environments with fixed seeds so the diversity
# experiment is reproducible across runs.
exp = Experiment('diversity_greed_test_setC')
env_list = []
for j in [10, 11, 12, 13, 14]:
    env_list.append(Gridworld(width=4, height=4, cell_size=32, seed=j))
exp.run(get_diversity, params={ 'env_list' : env_list, 'training_iterations' : 100, 'steps' : 50, 'verbose' : True }, k=5) | 34 | 122 | 0.724599 |
3aa14b59096c7ef2d46aca982e991e4b43caaa7a | 28,670 | py | Python | ltr/data/sampler.py | tsingqguo/ABA | c32edbbe5705b0332a08951b5ee436b5f58c2e70 | [
"MIT"
] | 12 | 2021-07-27T07:18:24.000Z | 2022-03-09T13:52:20.000Z | ltr/data/sampler.py | tsingqguo/ABA | c32edbbe5705b0332a08951b5ee436b5f58c2e70 | [
"MIT"
] | 2 | 2021-08-03T09:21:33.000Z | 2021-12-29T14:25:30.000Z | ltr/data/sampler.py | tsingqguo/ABA | c32edbbe5705b0332a08951b5ee436b5f58c2e70 | [
"MIT"
] | 3 | 2021-11-18T14:46:40.000Z | 2022-01-03T15:47:23.000Z | import random
import torch.utils.data
from extern.pytracking.libs import TensorDict
def no_processing(data):
    """Identity processing hook: return the sampled data unchanged."""
    return data
class TrackingSampler(torch.utils.data.Dataset):
    """ Class responsible for sampling frames from training sequences to form batches. Each training sample is a
    tuple consisting of i) a set of train frames, used to learn the DiMP classification model and obtain the
    modulation vector for IoU-Net, and ii) a set of test frames on which target classification loss for the predicted
    DiMP model, and the IoU prediction loss for the IoU-Net is calculated.
    The sampling is done in the following ways. First a dataset is selected at random. Next, a sequence is selected
    from that dataset. A base frame is then sampled randomly from the sequence. Next, a set of 'train frames' and
    'test frames' are sampled from the sequence from the range [base_frame_id - max_gap, base_frame_id] and
    (base_frame_id, base_frame_id + max_gap] respectively. Only the frames in which the target is visible are sampled.
    If enough visible frames are not found, the 'max_gap' is increased gradually till enough frames are found.
    The sampled frames are then passed through the input 'processing' function for the necessary processing-
    """
    def __init__(self, datasets, p_datasets, samples_per_epoch, max_gap,
                 num_test_frames, num_train_frames=1, processing=no_processing, frame_sample_mode='causal'):
        """
        args:
            datasets - List of datasets to be used for training
            p_datasets - List containing the probabilities by which each dataset will be sampled
            samples_per_epoch - Number of training samples per epoch
            max_gap - Maximum gap, in frame numbers, between the train frames and the test frames.
            num_test_frames - Number of test frames to sample.
            num_train_frames - Number of train frames to sample.
            processing - An instance of Processing class which performs the necessary processing of the data.
            frame_sample_mode - Either 'causal' or 'interval'. If 'causal', then the test frames are sampled in a causally,
                                otherwise randomly within the interval.
        """
        self.datasets = datasets
        # If p not provided, sample uniformly from all videos
        if p_datasets is None:
            # Weight each dataset by its number of sequences.
            p_datasets = [len(d) for d in self.datasets]
        # Normalize
        p_total = sum(p_datasets)
        self.p_datasets = [x / p_total for x in p_datasets]
        self.samples_per_epoch = samples_per_epoch
        self.max_gap = max_gap
        self.num_test_frames = num_test_frames
        self.num_train_frames = num_train_frames
        self.processing = processing
        self.frame_sample_mode = frame_sample_mode
    def __len__(self):
        # Epoch length is fixed by configuration; __getitem__ ignores its index.
        return self.samples_per_epoch
    def _sample_visible_ids(self, visible, num_ids=1, min_id=None, max_id=None):
        """ Samples num_ids frames between min_id and max_id for which target is visible
        args:
            visible - 1d Tensor indicating whether target is visible for each frame
            num_ids - number of frames to be samples
            min_id - Minimum allowed frame number
            max_id - Maximum allowed frame number
        returns:
            list - List of sampled frame numbers. None if not sufficient visible frames could be found.
        """
        if num_ids == 0:
            return []
        if min_id is None or min_id < 0:
            min_id = 0
        if max_id is None or max_id > len(visible):
            max_id = len(visible)
        valid_ids = [i for i in range(min_id, max_id) if visible[i]]
        # No visible ids
        if len(valid_ids) == 0:
            return None
        # random.choices samples with replacement, so repeated ids are possible.
        return random.choices(valid_ids, k=num_ids)
    def __getitem__(self, index):
        """
        args:
            index (int): Index (Ignored since we sample randomly)
        returns:
            TensorDict - dict containing all the data blocks
        """
        # Select a dataset
        dataset = random.choices(self.datasets, self.p_datasets)[0]
        is_video_dataset = dataset.is_video_sequence()
        # Sample a sequence with enough visible frames
        enough_visible_frames = False
        while not enough_visible_frames:
            # Sample a sequence
            seq_id = random.randint(0, dataset.get_num_sequences() - 1)
            # Sample frames
            seq_info_dict = dataset.get_sequence_info(seq_id)
            visible = seq_info_dict['visible']
            # Require more than twice as many visible frames as will be sampled,
            # and a minimum sequence length of 20 frames.
            enough_visible_frames = visible.type(torch.int64).sum().item() > 2 * (
                    self.num_test_frames + self.num_train_frames) and len(visible) >= 20
            # Image datasets always pass; they are turned into synthetic videos below.
            enough_visible_frames = enough_visible_frames or not is_video_dataset
        if is_video_dataset:
            train_frame_ids = None
            test_frame_ids = None
            gap_increase = 0
            if self.frame_sample_mode == 'interval':
                # Sample frame numbers within interval defined by the first frame
                while test_frame_ids is None:
                    base_frame_id = self._sample_visible_ids(visible, num_ids=1)
                    extra_train_frame_ids = self._sample_visible_ids(visible, num_ids=self.num_train_frames - 1,
                                                                     min_id=base_frame_id[
                                                                                0] - self.max_gap - gap_increase,
                                                                     max_id=base_frame_id[
                                                                                0] + self.max_gap + gap_increase)
                    if extra_train_frame_ids is None:
                        gap_increase += 5
                        continue
                    train_frame_ids = base_frame_id + extra_train_frame_ids
                    test_frame_ids = self._sample_visible_ids(visible, num_ids=self.num_test_frames,
                                                              min_id=train_frame_ids[0] - self.max_gap - gap_increase,
                                                              max_id=train_frame_ids[0] + self.max_gap + gap_increase)
                    gap_increase += 5  # Increase gap until a frame is found
            elif self.frame_sample_mode == 'causal':
                # Sample test and train frames in a causal manner, i.e. test_frame_ids > train_frame_ids
                while test_frame_ids is None:
                    base_frame_id = self._sample_visible_ids(visible, num_ids=1, min_id=self.num_train_frames - 1,
                                                             max_id=len(visible) - self.num_test_frames)
                    prev_frame_ids = self._sample_visible_ids(visible, num_ids=self.num_train_frames - 1,
                                                              min_id=base_frame_id[0] - self.max_gap - gap_increase,
                                                              max_id=base_frame_id[0])
                    if prev_frame_ids is None:
                        gap_increase += 5
                        continue
                    train_frame_ids = base_frame_id + prev_frame_ids
                    test_frame_ids = self._sample_visible_ids(visible, min_id=train_frame_ids[0] + 1,
                                                              max_id=train_frame_ids[0] + self.max_gap + gap_increase,
                                                              num_ids=self.num_test_frames)
                    # Increase gap until a frame is found
                    gap_increase += 5
        else:
            # In case of image dataset, just repeat the image to generate synthetic video
            train_frame_ids = [1] * self.num_train_frames
            test_frame_ids = [1] * self.num_test_frames
        train_frames, train_anno, meta_obj_train = dataset.get_frames(seq_id, train_frame_ids, seq_info_dict)
        test_frames, test_anno, meta_obj_test = dataset.get_frames(seq_id, test_frame_ids, seq_info_dict)
        data = TensorDict({'train_images': train_frames,
                           'train_anno': train_anno['bbox'],
                           'test_images': test_frames,
                           'test_anno': test_anno['bbox'],
                           'dataset': dataset.get_name(),
                           'test_class': meta_obj_test.get('object_class_name')})
        return self.processing(data)
class DiMPSampler(TrackingSampler):
    """Sampler preset for DiMP-style training.

    Identical to TrackingSampler; kept as a named class so training settings
    can refer to it explicitly. Defaults to 'causal' frame sampling.
    """
    def __init__(self, datasets, p_datasets, samples_per_epoch, max_gap,
                 num_test_frames, num_train_frames=1, processing=no_processing, frame_sample_mode='causal'):
        # Forward everything unchanged to the generic tracking sampler.
        super().__init__(datasets, p_datasets, samples_per_epoch, max_gap,
                         num_test_frames=num_test_frames, num_train_frames=num_train_frames,
                         processing=processing, frame_sample_mode=frame_sample_mode)
class ATOMSampler(TrackingSampler):
    """Sampler preset for ATOM-style training.

    Identical to TrackingSampler, but defaults to a single test frame and
    'interval' frame sampling.
    """
    def __init__(self, datasets, p_datasets, samples_per_epoch, max_gap,
                 num_test_frames=1, num_train_frames=1, processing=no_processing, frame_sample_mode='interval'):
        # Delegate all configuration to the generic tracking sampler.
        super().__init__(datasets, p_datasets, samples_per_epoch, max_gap,
                         num_test_frames=num_test_frames, num_train_frames=num_train_frames,
                         processing=processing, frame_sample_mode=frame_sample_mode)
class LWLSampler(torch.utils.data.Dataset):
    """ Class responsible for sampling frames from training sequences to form batches. Each training sample is a
    tuple consisting of i) a set of train frames and ii) a set of test frames. The train frames, along with the
    ground-truth masks, are passed to the few-shot learner to obtain the target model parameters \tau. The test frames
    are used to compute the prediction accuracy.
    The sampling is done in the following ways. First a dataset is selected at random. Next, a sequence is randomly
    selected from that dataset. A base frame is then sampled randomly from the sequence. The 'train frames'
    are then sampled from the sequence from the range [base_frame_id - max_gap, base_frame_id], and the 'test frames'
    are sampled from the sequence from the range (base_frame_id, base_frame_id + max_gap] respectively. Only the frames
    in which the target is visible are sampled. If enough visible frames are not found, the 'max_gap' is increased
    gradually until enough frames are found. Both the 'train frames' and the 'test frames' are sorted to preserve the
    temporal order.
    The sampled frames are then passed through the input 'processing' function for the necessary processing-
    """
    def __init__(self, datasets, p_datasets, samples_per_epoch, max_gap,
                 num_test_frames, num_train_frames=1, processing=no_processing, p_reverse=None):
        """
        args:
            datasets - List of datasets to be used for training
            p_datasets - List containing the probabilities by which each dataset will be sampled
            samples_per_epoch - Number of training samples per epoch
            max_gap - Maximum gap, in frame numbers, between the train frames and the test frames.
            num_test_frames - Number of test frames to sample.
            num_train_frames - Number of train frames to sample.
            processing - An instance of Processing class which performs the necessary processing of the data.
            p_reverse - Probability that a sequence is temporally reversed
        """
        self.datasets = datasets
        # If p not provided, sample uniformly from all videos
        if p_datasets is None:
            # Weight each dataset by its number of sequences.
            p_datasets = [len(d) for d in self.datasets]
        # Normalize
        p_total = sum(p_datasets)
        self.p_datasets = [x/p_total for x in p_datasets]
        self.samples_per_epoch = samples_per_epoch
        self.max_gap = max_gap
        self.num_test_frames = num_test_frames
        self.num_train_frames = num_train_frames
        self.processing = processing
        self.p_reverse = p_reverse
    def __len__(self):
        # Epoch length is fixed by configuration; __getitem__ ignores its index.
        return self.samples_per_epoch
    def _sample_visible_ids(self, visible, num_ids=1, min_id=None, max_id=None):
        """ Samples num_ids frames between min_id and max_id for which target is visible
        args:
            visible - 1d Tensor indicating whether target is visible for each frame
            num_ids - number of frames to be samples
            min_id - Minimum allowed frame number
            max_id - Maximum allowed frame number
        returns:
            list - List of sampled frame numbers. None if not sufficient visible frames could be found.
        """
        if min_id is None or min_id < 0:
            min_id = 0
        if max_id is None or max_id > len(visible):
            max_id = len(visible)
        valid_ids = [i for i in range(min_id, max_id) if visible[i]]
        # No visible ids
        if len(valid_ids) == 0:
            return None
        # random.choices samples with replacement, so repeated ids are possible.
        return random.choices(valid_ids, k=num_ids)
    def __getitem__(self, index):
        """
        args:
            index (int): Index (dataset index)
        returns:
            TensorDict - dict containing all the data blocks
        """
        # Select a dataset
        dataset = random.choices(self.datasets, self.p_datasets)[0]
        is_video_dataset = dataset.is_video_sequence()
        # Optionally reverse the temporal order of the sampled sub-sequence.
        reverse_sequence = False
        if self.p_reverse is not None:
            reverse_sequence = random.random() < self.p_reverse
        # Sample a sequence with enough visible frames
        enough_visible_frames = False
        while not enough_visible_frames:
            # Sample a sequence
            seq_id = random.randint(0, dataset.get_num_sequences() - 1)
            # Sample frames
            seq_info_dict = dataset.get_sequence_info(seq_id)
            visible = seq_info_dict['visible']
            enough_visible_frames = visible.type(torch.int64).sum().item() > 2 * (self.num_test_frames + self.num_train_frames)
            # Image datasets always pass; they are turned into synthetic videos below.
            enough_visible_frames = enough_visible_frames or not is_video_dataset
        if is_video_dataset:
            train_frame_ids = None
            test_frame_ids = None
            gap_increase = 0
            # Sample test and train frames in a causal manner, i.e. test_frame_ids > train_frame_ids
            while test_frame_ids is None:
                # Safety valve: give up after many failed attempts instead of looping forever.
                if gap_increase > 1000:
                    raise Exception('Frame not found')
                if not reverse_sequence:
                    base_frame_id = self._sample_visible_ids(visible, num_ids=1, min_id=self.num_train_frames - 1,
                                                             max_id=len(visible)-self.num_test_frames)
                    prev_frame_ids = self._sample_visible_ids(visible, num_ids=self.num_train_frames - 1,
                                                              min_id=base_frame_id[0] - self.max_gap - gap_increase,
                                                              max_id=base_frame_id[0])
                    if prev_frame_ids is None:
                        gap_increase += 5
                        continue
                    train_frame_ids = base_frame_id + prev_frame_ids
                    test_frame_ids = self._sample_visible_ids(visible, min_id=train_frame_ids[0]+1,
                                                              max_id=train_frame_ids[0] + self.max_gap + gap_increase,
                                                              num_ids=self.num_test_frames)
                    # Increase gap until a frame is found
                    gap_increase += 5
                else:
                    # Sample in reverse order, i.e. train frames come after the test frames
                    base_frame_id = self._sample_visible_ids(visible, num_ids=1, min_id=self.num_test_frames + 1,
                                                             max_id=len(visible) - self.num_train_frames - 1)
                    prev_frame_ids = self._sample_visible_ids(visible, num_ids=self.num_train_frames - 1,
                                                              min_id=base_frame_id[0],
                                                              max_id=base_frame_id[0] + self.max_gap + gap_increase)
                    if prev_frame_ids is None:
                        gap_increase += 5
                        continue
                    train_frame_ids = base_frame_id + prev_frame_ids
                    test_frame_ids = self._sample_visible_ids(visible, min_id=0,
                                                              max_id=train_frame_ids[0] - 1,
                                                              num_ids=self.num_test_frames)
                    # Increase gap until a frame is found
                    gap_increase += 5
        else:
            # In case of image dataset, just repeat the image to generate synthetic video
            train_frame_ids = [1]*self.num_train_frames
            test_frame_ids = [1]*self.num_test_frames
        # Sort frames
        train_frame_ids = sorted(train_frame_ids, reverse=reverse_sequence)
        test_frame_ids = sorted(test_frame_ids, reverse=reverse_sequence)
        all_frame_ids = train_frame_ids + test_frame_ids
        # Load frames
        all_frames, all_anno, meta_obj = dataset.get_frames(seq_id, all_frame_ids, seq_info_dict)
        # Split the loaded frames/annotations back into train and test parts.
        train_frames = all_frames[:len(train_frame_ids)]
        test_frames = all_frames[len(train_frame_ids):]
        train_anno = {}
        test_anno = {}
        for key, value in all_anno.items():
            train_anno[key] = value[:len(train_frame_ids)]
            test_anno[key] = value[len(train_frame_ids):]
        # Masks are optional; some datasets only provide boxes.
        train_masks = train_anno['mask'] if 'mask' in train_anno else None
        test_masks = test_anno['mask'] if 'mask' in test_anno else None
        data = TensorDict({'train_images': train_frames,
                           'train_masks': train_masks,
                           'train_anno': train_anno['bbox'],
                           'test_images': test_frames,
                           'test_masks': test_masks,
                           'test_anno': test_anno['bbox'],
                           'dataset': dataset.get_name()})
        return self.processing(data)
class KYSSampler(torch.utils.data.Dataset):
    """Samples (train frames, consecutive test sub-sequence) pairs for sequence-level
    training. When occlusion info is available and enabled, the test sub-sequence is
    chosen to span an occlusion event whenever possible.
    """
    def __init__(self, datasets, p_datasets, samples_per_epoch, sequence_sample_info, processing=no_processing,
                 sample_occluded_sequences=False):
        """
        args:
            datasets - List of datasets to be used for training
            p_datasets - List containing the probabilities by which each dataset will be sampled
            samples_per_epoch - Number of training samples per epoch
            sequence_sample_info - A dict containing information about how to sample a sequence, e.g. number of frames,
                                    max gap between frames, etc.
            processing - An instance of Processing class which performs the necessary processing of the data.
            sample_occluded_sequences - If true, sub-sequence containing occlusion is sampled whenever possible
        """
        self.datasets = datasets
        # If p not provided, sample uniformly from all videos
        if p_datasets is None:
            # Equal weight per dataset (not per sequence).
            p_datasets = [1 for d in self.datasets]
        # Normalize
        p_total = sum(p_datasets)
        self.p_datasets = [x/p_total for x in p_datasets]
        self.samples_per_epoch = samples_per_epoch
        self.sequence_sample_info = sequence_sample_info
        self.processing = processing
        self.sample_occluded_sequences = sample_occluded_sequences
    def __len__(self):
        # Epoch length is fixed by configuration; __getitem__ ignores its index.
        return self.samples_per_epoch
    def _sample_ids(self, valid, num_ids=1, min_id=None, max_id=None):
        """ Samples num_ids frames between min_id and max_id for which target is visible
        args:
            visible - 1d Tensor indicating whether target is visible for each frame
            num_ids - number of frames to be samples
            min_id - Minimum allowed frame number
            max_id - Maximum allowed frame number
        returns:
            list - List of sampled frame numbers. None if not sufficient visible frames could be found.
        """
        if min_id is None or min_id < 0:
            min_id = 0
        if max_id is None or max_id > len(valid):
            max_id = len(valid)
        valid_ids = [i for i in range(min_id, max_id) if valid[i]]
        # No visible ids
        if len(valid_ids) == 0:
            return None
        # random.choices samples with replacement, so repeated ids are possible.
        return random.choices(valid_ids, k=num_ids)
    def find_occlusion_end_frame(self, first_occ_frame, target_not_fully_visible):
        """Return the first frame index >= first_occ_frame where the target is fully
        visible again, or the sequence length if the occlusion never ends."""
        for i in range(first_occ_frame, len(target_not_fully_visible)):
            if not target_not_fully_visible[i]:
                return i
        return len(target_not_fully_visible)
    def __getitem__(self, index):
        """
        args:
            index (int): Index (Ignored since we sample randomly)
        returns:
            TensorDict - dict containing all the data blocks
        """
        # Select a dataset
        p_datasets = self.p_datasets
        dataset = random.choices(self.datasets, p_datasets)[0]
        is_video_dataset = dataset.is_video_sequence()
        #print(dataset)
        num_train_frames = self.sequence_sample_info['num_train_frames']
        num_test_frames = self.sequence_sample_info['num_test_frames']
        max_train_gap = self.sequence_sample_info['max_train_gap']
        allow_missing_target = self.sequence_sample_info['allow_missing_target']
        min_fraction_valid_frames = self.sequence_sample_info.get('min_fraction_valid_frames', 0.0)
        if allow_missing_target:
            min_visible_frames = 0
        else:
            # Only the allow_missing_target=True path is implemented.
            raise NotImplementedError
        valid_sequence = False
        # Sample a sequence with enough visible frames and get anno for the same
        while not valid_sequence:
            seq_id = random.randint(0, dataset.get_num_sequences() - 1)
            seq_info_dict = dataset.get_sequence_info(seq_id)
            visible = seq_info_dict['visible']
            # Fall back to the binary visibility flags when no ratio is provided.
            visible_ratio = seq_info_dict.get('visible_ratio', visible)
            num_visible = visible.type(torch.int64).sum().item()
            enough_visible_frames = not is_video_dataset or (num_visible > min_visible_frames and len(visible) >= 20)
            valid_sequence = enough_visible_frames
        if self.sequence_sample_info['mode'] == 'Sequence':
            if is_video_dataset:
                train_frame_ids = None
                test_frame_ids = None
                gap_increase = 0
                # 1 where a real test frame was sampled, 0 for padding entries.
                test_valid_image = torch.zeros(num_test_frames, dtype=torch.int8)
                # Sample frame numbers in a causal manner, i.e. test_frame_ids > train_frame_ids
                while test_frame_ids is None:
                    occlusion_sampling = False
                    if dataset.has_occlusion_info() and self.sample_occluded_sequences:
                        target_not_fully_visible = visible_ratio < 0.9
                        if target_not_fully_visible.float().sum() > 0:
                            occlusion_sampling = True
                    if occlusion_sampling:
                        # Anchor the test sub-sequence around the first occlusion event.
                        first_occ_frame = target_not_fully_visible.nonzero()[0]
                        occ_end_frame = self.find_occlusion_end_frame(first_occ_frame, target_not_fully_visible)
                        # Make sure target visible in first frame
                        base_frame_id = self._sample_ids(visible, num_ids=1, min_id=max(0, first_occ_frame - 20),
                                                         max_id=first_occ_frame - 5)
                        if base_frame_id is None:
                            base_frame_id = 0
                        else:
                            base_frame_id = base_frame_id[0]
                        prev_frame_ids = self._sample_ids(visible, num_ids=num_train_frames,
                                                          min_id=base_frame_id - max_train_gap - gap_increase - 1,
                                                          max_id=base_frame_id - 1)
                        if prev_frame_ids is None:
                            # At the sequence start there is no earlier frame: reuse the base frame.
                            if base_frame_id - max_train_gap - gap_increase - 1 < 0:
                                prev_frame_ids = [base_frame_id] * num_train_frames
                            else:
                                gap_increase += 5
                                continue
                        train_frame_ids = prev_frame_ids
                        end_frame = min(occ_end_frame + random.randint(5, 20), len(visible) - 1)
                        if (end_frame - base_frame_id) < num_test_frames:
                            rem_frames = num_test_frames - (end_frame - base_frame_id)
                            end_frame = random.randint(end_frame, min(len(visible) - 1, end_frame + rem_frames))
                            base_frame_id = max(0, end_frame - num_test_frames + 1)
                            end_frame = min(end_frame, len(visible) - 1)
                        # Spread num_test_frames evenly over [base_frame_id, end_frame].
                        step_len = float(end_frame - base_frame_id) / float(num_test_frames)
                        test_frame_ids = [base_frame_id + int(x * step_len) for x in range(0, num_test_frames)]
                        test_valid_image[:len(test_frame_ids)] = 1
                        test_frame_ids = test_frame_ids + [0] * (num_test_frames - len(test_frame_ids))
                    else:
                        # Make sure target visible in first frame
                        base_frame_id = self._sample_ids(visible, num_ids=1, min_id=2*num_train_frames,
                                                         max_id=len(visible) - int(num_test_frames * min_fraction_valid_frames))
                        if base_frame_id is None:
                            base_frame_id = 0
                        else:
                            base_frame_id = base_frame_id[0]
                        prev_frame_ids = self._sample_ids(visible, num_ids=num_train_frames,
                                                          min_id=base_frame_id - max_train_gap - gap_increase - 1,
                                                          max_id=base_frame_id - 1)
                        if prev_frame_ids is None:
                            # At the sequence start there is no earlier frame: reuse the base frame.
                            if base_frame_id - max_train_gap - gap_increase - 1 < 0:
                                prev_frame_ids = [base_frame_id] * num_train_frames
                            else:
                                gap_increase += 5
                                continue
                        train_frame_ids = prev_frame_ids
                        # Take a contiguous run of frames starting at the base frame; pad with id 0.
                        test_frame_ids = list(range(base_frame_id, min(len(visible), base_frame_id + num_test_frames)))
                        test_valid_image[:len(test_frame_ids)] = 1
                        test_frame_ids = test_frame_ids + [0]*(num_test_frames - len(test_frame_ids))
            else:
                raise NotImplementedError
        else:
            raise NotImplementedError
        # Get frames
        train_frames, train_anno_dict, _ = dataset.get_frames(seq_id, train_frame_ids, seq_info_dict)
        #print(train_frames)
        train_anno = train_anno_dict['bbox']
        test_frames, test_anno_dict, _ = dataset.get_frames(seq_id, test_frame_ids, seq_info_dict)
        test_anno = test_anno_dict['bbox']
        test_valid_anno = test_anno_dict['valid']
        test_visible = test_anno_dict['visible']
        test_visible_ratio = test_anno_dict.get('visible_ratio', torch.ones(len(test_visible)))
        # Prepare data
        data = TensorDict({'train_images': train_frames,
                           'train_anno': train_anno,
                           'test_images': test_frames,
                           'test_anno': test_anno,
                           'test_valid_anno': test_valid_anno,
                           'test_visible': test_visible,
                           'test_valid_image': test_valid_image,
                           'test_visible_ratio': test_visible_ratio,
                           'dataset': dataset.get_name()})
        # Send for processing
        return self.processing(data)
| 47.783333 | 128 | 0.589327 |
e6315aac972aa97dc9bb12d15ee123a8d57d75f2 | 6,946 | py | Python | tests/components/blink/test_config_flow.py | SmarthomeNinja/core | f4b8a95205ea7d4126fc5e704da532cd8eed937e | [
"Apache-2.0"
] | 6 | 2020-07-18T16:33:25.000Z | 2021-09-26T09:52:04.000Z | tests/components/blink/test_config_flow.py | SmarthomeNinja/core | f4b8a95205ea7d4126fc5e704da532cd8eed937e | [
"Apache-2.0"
] | 47 | 2020-07-23T07:14:33.000Z | 2022-03-31T06:01:46.000Z | tests/components/blink/test_config_flow.py | SmarthomeNinja/core | f4b8a95205ea7d4126fc5e704da532cd8eed937e | [
"Apache-2.0"
] | 3 | 2021-05-18T16:42:18.000Z | 2021-07-19T22:04:21.000Z | """Test the Blink config flow."""
from homeassistant import config_entries, data_entry_flow, setup
from homeassistant.components.blink import DOMAIN
from tests.async_mock import Mock, patch
from tests.common import MockConfigEntry
async def test_form(hass):
    """Test a successful user-initiated config flow creates an entry."""
    await setup.async_setup_component(hass, "persistent_notification", {})
    # Start the user flow; expect the initial credentials form with no errors.
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert result["type"] == "form"
    assert result["errors"] == {}
    # Simulate a Blink backend that authenticates successfully without 2FA
    # (key_required=False), and stub out the integration setup hooks.
    with patch(
        "homeassistant.components.blink.config_flow.Blink",
        return_value=Mock(
            get_auth_token=Mock(return_value=True),
            key_required=False,
            login_response={},
        ),
    ), patch(
        "homeassistant.components.blink.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.blink.async_setup_entry", return_value=True,
    ) as mock_setup_entry:
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"], {"username": "blink@example.com", "password": "example"},
        )
    # The flow should finish and store the credentials plus the login response.
    assert result2["type"] == "create_entry"
    assert result2["title"] == "blink"
    assert result2["result"].unique_id == "blink@example.com"
    assert result2["data"] == {
        "username": "blink@example.com",
        "password": "example",
        "login_response": {},
    }
    await hass.async_block_till_done()
    # The integration setup hooks should have been invoked exactly once.
    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
async def test_import(hass):
    """Test importing a YAML configuration creates a config entry."""
    # Simulate a Blink backend that authenticates successfully without 2FA.
    with patch(
        "homeassistant.components.blink.config_flow.Blink",
        return_value=Mock(
            get_auth_token=Mock(return_value=True),
            key_required=False,
            login_response={},
        ),
    ), patch(
        "homeassistant.components.blink.async_setup_entry", return_value=True,
    ) as mock_setup_entry:
        # Kick off the flow with the import source and YAML-style data.
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_IMPORT},
            data={
                "username": "blink@example.com",
                "password": "example",
                "scan_interval": 10,
            },
        )
    # The imported data (including scan_interval) should be stored on the entry.
    assert result["type"] == "create_entry"
    assert result["title"] == "blink"
    assert result["result"].unique_id == "blink@example.com"
    assert result["data"] == {
        "username": "blink@example.com",
        "password": "example",
        "scan_interval": 10,
        "login_response": {},
    }
    await hass.async_block_till_done()
    assert len(mock_setup_entry.mock_calls) == 1
async def test_form_2fa(hass):
    """Test the flow shows the 2fa step and completes after a valid pin."""
    await setup.async_setup_component(hass, "persistent_notification", {})
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    # Backend that authenticates but requires a 2FA key on first login.
    mock_blink = Mock(
        get_auth_token=Mock(return_value=True),
        key_required=True,
        login_response={},
        login_handler=Mock(send_auth_key=Mock(return_value=True)),
    )
    with patch(
        "homeassistant.components.blink.config_flow.Blink", return_value=mock_blink
    ), patch(
        "homeassistant.components.blink.async_setup", return_value=True
    ) as mock_setup:
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"], {"username": "blink@example.com", "password": "example"}
        )
    # With key_required=True the flow must ask for the 2FA pin.
    assert result2["type"] == "form"
    assert result2["step_id"] == "2fa"
    # Simulate the backend accepting the key so the next step can finish.
    mock_blink.key_required = False
    with patch(
        "homeassistant.components.blink.config_flow.Blink", return_value=mock_blink
    ), patch(
        "homeassistant.components.blink.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.blink.async_setup_entry", return_value=True
    ) as mock_setup_entry:
        result3 = await hass.config_entries.flow.async_configure(
            result2["flow_id"], {"pin": "1234"}
        )
    # Submitting the pin should complete the flow and create the entry.
    assert result3["type"] == "create_entry"
    assert result3["title"] == "blink"
    assert result3["result"].unique_id == "blink@example.com"
    await hass.async_block_till_done()
    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
async def test_form_invalid_auth(hass):
    """Test we handle invalid auth."""
    flow = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    # A backend that fails to return an auth token signals bad credentials.
    failed_auth = patch(
        "homeassistant.components.blink.config_flow.Blink.get_auth_token",
        return_value=None,
    )
    with failed_auth:
        outcome = await hass.config_entries.flow.async_configure(
            flow["flow_id"], {"username": "blink@example.com", "password": "example"}
        )
    # The flow must re-show the form with an invalid_auth error.
    assert outcome["type"] == "form"
    assert outcome["errors"] == {"base": "invalid_auth"}
async def test_form_unknown_error(hass):
    """Test we handle unknown error at startup."""
    flow = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    # Force an unexpected exception inside validate_input to exercise the
    # generic error branch of the flow.
    failed_auth = patch(
        "homeassistant.components.blink.config_flow.Blink.get_auth_token",
        return_value=None,
    )
    exploding_validate = patch(
        "homeassistant.components.blink.config_flow.validate_input",
        side_effect=KeyError,
    )
    with failed_auth, exploding_validate:
        outcome = await hass.config_entries.flow.async_configure(
            flow["flow_id"], {"username": "blink@example.com", "password": "example"}
        )
    # The flow must re-show the form with a generic unknown error.
    assert outcome["type"] == "form"
    assert outcome["errors"] == {"base": "unknown"}
async def test_options_flow(hass):
    """Test the options flow updates scan_interval and the live Blink object."""
    # Pre-create a configured entry as if the user had already set up Blink.
    config_entry = MockConfigEntry(
        domain=DOMAIN,
        data={
            "username": "blink@example.com",
            "password": "example",
            "login_response": {},
        },
        options={},
        entry_id=1,
    )
    config_entry.add_to_hass(hass)
    # Stubbed Blink object whose refresh_rate should be updated by the flow.
    mock_blink = Mock(
        login_handler=True,
        setup_params=Mock(return_value=True),
        setup_post_verify=Mock(return_value=True),
    )
    with patch("homeassistant.components.blink.Blink", return_value=mock_blink):
        await hass.config_entries.async_setup(config_entry.entry_id)
        await hass.async_block_till_done()
        # Start the options flow; expect the simple options form.
        result = await hass.config_entries.options.async_init(
            config_entry.entry_id, context={"show_advanced_options": False}
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
        assert result["step_id"] == "simple_options"
        # Submit a new scan interval and verify it is stored and applied.
        result = await hass.config_entries.options.async_configure(
            result["flow_id"], user_input={"scan_interval": 5},
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
        assert result["data"] == {"scan_interval": 5}
        assert mock_blink.refresh_rate == 5
| 33.882927 | 88 | 0.648575 |
fa0f72f362b45a039ed58e6280fc2a574468d780 | 27,206 | py | Python | google/cloud/video/stitcher_v1/services/video_stitcher_service/pagers.py | renovate-bot/python-video-stitcher | ad5da37511e2dbe78eb4651f705660fab9eb745d | [
"Apache-2.0"
] | null | null | null | google/cloud/video/stitcher_v1/services/video_stitcher_service/pagers.py | renovate-bot/python-video-stitcher | ad5da37511e2dbe78eb4651f705660fab9eb745d | [
"Apache-2.0"
] | 6 | 2022-02-23T16:49:56.000Z | 2022-03-07T16:42:45.000Z | google/cloud/video/stitcher_v1/services/video_stitcher_service/pagers.py | renovate-bot/python-video-stitcher | ad5da37511e2dbe78eb4651f705660fab9eb745d | [
"Apache-2.0"
] | 2 | 2022-02-14T22:26:33.000Z | 2022-03-08T17:18:00.000Z | # -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import (
Any,
AsyncIterator,
Awaitable,
Callable,
Iterator,
Optional,
Sequence,
Tuple,
)
from google.cloud.video.stitcher_v1.types import (
ad_tag_details,
cdn_keys,
slates,
stitch_details,
video_stitcher_service,
)
class ListCdnKeysPager:
    """Iterate over every ``CdnKey`` across pages of ``ListCdnKeys`` responses.

    Wraps an initial
    :class:`google.cloud.video.stitcher_v1.types.ListCdnKeysResponse` and,
    while a ``next_page_token`` is present, issues follow-up ``ListCdnKeys``
    requests so that ``__iter__`` yields every item of the ``cdn_keys``
    fields. Attribute access falls through to the most recent response.
    """

    def __init__(
        self,
        method: Callable[..., video_stitcher_service.ListCdnKeysResponse],
        request: video_stitcher_service.ListCdnKeysRequest,
        response: video_stitcher_service.ListCdnKeysResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method: The bound RPC method used to fetch subsequent pages.
            request (google.cloud.video.stitcher_v1.types.ListCdnKeysRequest):
                The initial request object.
            response (google.cloud.video.stitcher_v1.types.ListCdnKeysResponse):
                The initial response object.
            metadata: Strings sent along with each request as metadata.
        """
        self._method = method
        self._request = video_stitcher_service.ListCdnKeysRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the latest response object.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[video_stitcher_service.ListCdnKeysResponse]:
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = self._method(self._request, metadata=self._metadata)
            yield self._response

    def __iter__(self) -> Iterator[cdn_keys.CdnKey]:
        for page in self.pages:
            for cdn_key in page.cdn_keys:
                yield cdn_key

    def __repr__(self) -> str:
        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
class ListCdnKeysAsyncPager:
    """Async counterpart of ``ListCdnKeysPager``.

    Wraps an initial
    :class:`google.cloud.video.stitcher_v1.types.ListCdnKeysResponse` and
    exposes ``__aiter__`` over every ``CdnKey`` in the ``cdn_keys`` fields,
    awaiting additional ``ListCdnKeys`` requests while a ``next_page_token``
    is present. Attribute access falls through to the most recent response.
    """

    def __init__(
        self,
        method: Callable[..., Awaitable[video_stitcher_service.ListCdnKeysResponse]],
        request: video_stitcher_service.ListCdnKeysRequest,
        response: video_stitcher_service.ListCdnKeysResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method: The bound async RPC method used to fetch subsequent pages.
            request (google.cloud.video.stitcher_v1.types.ListCdnKeysRequest):
                The initial request object.
            response (google.cloud.video.stitcher_v1.types.ListCdnKeysResponse):
                The initial response object.
            metadata: Strings sent along with each request as metadata.
        """
        self._method = method
        self._request = video_stitcher_service.ListCdnKeysRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the latest response object.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterator[video_stitcher_service.ListCdnKeysResponse]:
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = await self._method(self._request, metadata=self._metadata)
            yield self._response

    def __aiter__(self) -> AsyncIterator[cdn_keys.CdnKey]:
        async def _flatten():
            async for page in self.pages:
                for cdn_key in page.cdn_keys:
                    yield cdn_key

        return _flatten()

    def __repr__(self) -> str:
        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
class ListVodStitchDetailsPager:
    """Pager that flattens ``list_vod_stitch_details`` results.

    Wraps the first
    :class:`google.cloud.video.stitcher_v1.types.ListVodStitchDetailsResponse`
    and, while iterating, keeps issuing ``ListVodStitchDetails`` requests as
    long as a ``next_page_token`` is present, yielding every item of the
    ``vod_stitch_details`` field. Attribute access is proxied to the most
    recently fetched response.
    """

    def __init__(
        self,
        method: Callable[..., video_stitcher_service.ListVodStitchDetailsResponse],
        request: video_stitcher_service.ListVodStitchDetailsRequest,
        response: video_stitcher_service.ListVodStitchDetailsResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The API method that produced ``response``;
                reused to fetch follow-up pages.
            request (google.cloud.video.stitcher_v1.types.ListVodStitchDetailsRequest):
                The initial request object.
            response (google.cloud.video.stitcher_v1.types.ListVodStitchDetailsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        # Copy the request so page_token mutations stay local to this pager.
        self._request = video_stitcher_service.ListVodStitchDetailsRequest(request)
        self._method = method
        self._metadata = metadata
        self._response = response

    def __getattr__(self, name: str) -> Any:
        # Proxy unknown attribute lookups to the latest response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[video_stitcher_service.ListVodStitchDetailsResponse]:
        """Yield each response page, fetching the next while tokens remain."""
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __iter__(self) -> Iterator[stitch_details.VodStitchDetail]:
        for page in self.pages:
            for item in page.vod_stitch_details:
                yield item

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"
class ListVodStitchDetailsAsyncPager:
    """Async pager that flattens ``list_vod_stitch_details`` results.

    Wraps the first
    :class:`google.cloud.video.stitcher_v1.types.ListVodStitchDetailsResponse`
    and, while iterating, keeps issuing ``ListVodStitchDetails`` requests as
    long as a ``next_page_token`` is present, yielding every item of the
    ``vod_stitch_details`` field. Attribute access is proxied to the most
    recently fetched response.
    """

    def __init__(
        self,
        method: Callable[
            ..., Awaitable[video_stitcher_service.ListVodStitchDetailsResponse]
        ],
        request: video_stitcher_service.ListVodStitchDetailsRequest,
        response: video_stitcher_service.ListVodStitchDetailsResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The API method that produced ``response``;
                reused to fetch follow-up pages.
            request (google.cloud.video.stitcher_v1.types.ListVodStitchDetailsRequest):
                The initial request object.
            response (google.cloud.video.stitcher_v1.types.ListVodStitchDetailsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        # Copy the request so page_token mutations stay local to this pager.
        self._request = video_stitcher_service.ListVodStitchDetailsRequest(request)
        self._method = method
        self._metadata = metadata
        self._response = response

    def __getattr__(self, name: str) -> Any:
        # Proxy unknown attribute lookups to the latest response.
        return getattr(self._response, name)

    @property
    async def pages(
        self,
    ) -> AsyncIterator[video_stitcher_service.ListVodStitchDetailsResponse]:
        """Yield each response page, fetching the next while tokens remain."""
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = await self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __aiter__(self) -> AsyncIterator[stitch_details.VodStitchDetail]:
        async def _flatten():
            async for page in self.pages:
                for item in page.vod_stitch_details:
                    yield item

        return _flatten()

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"
class ListVodAdTagDetailsPager:
    """Pager that flattens ``list_vod_ad_tag_details`` results.

    Wraps the first
    :class:`google.cloud.video.stitcher_v1.types.ListVodAdTagDetailsResponse`
    and, while iterating, keeps issuing ``ListVodAdTagDetails`` requests as
    long as a ``next_page_token`` is present, yielding every item of the
    ``vod_ad_tag_details`` field. Attribute access is proxied to the most
    recently fetched response.
    """

    def __init__(
        self,
        method: Callable[..., video_stitcher_service.ListVodAdTagDetailsResponse],
        request: video_stitcher_service.ListVodAdTagDetailsRequest,
        response: video_stitcher_service.ListVodAdTagDetailsResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The API method that produced ``response``;
                reused to fetch follow-up pages.
            request (google.cloud.video.stitcher_v1.types.ListVodAdTagDetailsRequest):
                The initial request object.
            response (google.cloud.video.stitcher_v1.types.ListVodAdTagDetailsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        # Copy the request so page_token mutations stay local to this pager.
        self._request = video_stitcher_service.ListVodAdTagDetailsRequest(request)
        self._method = method
        self._metadata = metadata
        self._response = response

    def __getattr__(self, name: str) -> Any:
        # Proxy unknown attribute lookups to the latest response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[video_stitcher_service.ListVodAdTagDetailsResponse]:
        """Yield each response page, fetching the next while tokens remain."""
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __iter__(self) -> Iterator[ad_tag_details.VodAdTagDetail]:
        for page in self.pages:
            for item in page.vod_ad_tag_details:
                yield item

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"
class ListVodAdTagDetailsAsyncPager:
    """Async pager that flattens ``list_vod_ad_tag_details`` results.

    Wraps the first
    :class:`google.cloud.video.stitcher_v1.types.ListVodAdTagDetailsResponse`
    and, while iterating, keeps issuing ``ListVodAdTagDetails`` requests as
    long as a ``next_page_token`` is present, yielding every item of the
    ``vod_ad_tag_details`` field. Attribute access is proxied to the most
    recently fetched response.
    """

    def __init__(
        self,
        method: Callable[
            ..., Awaitable[video_stitcher_service.ListVodAdTagDetailsResponse]
        ],
        request: video_stitcher_service.ListVodAdTagDetailsRequest,
        response: video_stitcher_service.ListVodAdTagDetailsResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The API method that produced ``response``;
                reused to fetch follow-up pages.
            request (google.cloud.video.stitcher_v1.types.ListVodAdTagDetailsRequest):
                The initial request object.
            response (google.cloud.video.stitcher_v1.types.ListVodAdTagDetailsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        # Copy the request so page_token mutations stay local to this pager.
        self._request = video_stitcher_service.ListVodAdTagDetailsRequest(request)
        self._method = method
        self._metadata = metadata
        self._response = response

    def __getattr__(self, name: str) -> Any:
        # Proxy unknown attribute lookups to the latest response.
        return getattr(self._response, name)

    @property
    async def pages(
        self,
    ) -> AsyncIterator[video_stitcher_service.ListVodAdTagDetailsResponse]:
        """Yield each response page, fetching the next while tokens remain."""
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = await self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __aiter__(self) -> AsyncIterator[ad_tag_details.VodAdTagDetail]:
        async def _flatten():
            async for page in self.pages:
                for item in page.vod_ad_tag_details:
                    yield item

        return _flatten()

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"
class ListLiveAdTagDetailsPager:
    """Pager that flattens ``list_live_ad_tag_details`` results.

    Wraps the first
    :class:`google.cloud.video.stitcher_v1.types.ListLiveAdTagDetailsResponse`
    and, while iterating, keeps issuing ``ListLiveAdTagDetails`` requests as
    long as a ``next_page_token`` is present, yielding every item of the
    ``live_ad_tag_details`` field. Attribute access is proxied to the most
    recently fetched response.
    """

    def __init__(
        self,
        method: Callable[..., video_stitcher_service.ListLiveAdTagDetailsResponse],
        request: video_stitcher_service.ListLiveAdTagDetailsRequest,
        response: video_stitcher_service.ListLiveAdTagDetailsResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The API method that produced ``response``;
                reused to fetch follow-up pages.
            request (google.cloud.video.stitcher_v1.types.ListLiveAdTagDetailsRequest):
                The initial request object.
            response (google.cloud.video.stitcher_v1.types.ListLiveAdTagDetailsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        # Copy the request so page_token mutations stay local to this pager.
        self._request = video_stitcher_service.ListLiveAdTagDetailsRequest(request)
        self._method = method
        self._metadata = metadata
        self._response = response

    def __getattr__(self, name: str) -> Any:
        # Proxy unknown attribute lookups to the latest response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[video_stitcher_service.ListLiveAdTagDetailsResponse]:
        """Yield each response page, fetching the next while tokens remain."""
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __iter__(self) -> Iterator[ad_tag_details.LiveAdTagDetail]:
        for page in self.pages:
            for item in page.live_ad_tag_details:
                yield item

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"
class ListLiveAdTagDetailsAsyncPager:
    """Async pager that flattens ``list_live_ad_tag_details`` results.

    Wraps the first
    :class:`google.cloud.video.stitcher_v1.types.ListLiveAdTagDetailsResponse`
    and, while iterating, keeps issuing ``ListLiveAdTagDetails`` requests as
    long as a ``next_page_token`` is present, yielding every item of the
    ``live_ad_tag_details`` field. Attribute access is proxied to the most
    recently fetched response.
    """

    def __init__(
        self,
        method: Callable[
            ..., Awaitable[video_stitcher_service.ListLiveAdTagDetailsResponse]
        ],
        request: video_stitcher_service.ListLiveAdTagDetailsRequest,
        response: video_stitcher_service.ListLiveAdTagDetailsResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The API method that produced ``response``;
                reused to fetch follow-up pages.
            request (google.cloud.video.stitcher_v1.types.ListLiveAdTagDetailsRequest):
                The initial request object.
            response (google.cloud.video.stitcher_v1.types.ListLiveAdTagDetailsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        # Copy the request so page_token mutations stay local to this pager.
        self._request = video_stitcher_service.ListLiveAdTagDetailsRequest(request)
        self._method = method
        self._metadata = metadata
        self._response = response

    def __getattr__(self, name: str) -> Any:
        # Proxy unknown attribute lookups to the latest response.
        return getattr(self._response, name)

    @property
    async def pages(
        self,
    ) -> AsyncIterator[video_stitcher_service.ListLiveAdTagDetailsResponse]:
        """Yield each response page, fetching the next while tokens remain."""
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = await self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __aiter__(self) -> AsyncIterator[ad_tag_details.LiveAdTagDetail]:
        async def _flatten():
            async for page in self.pages:
                for item in page.live_ad_tag_details:
                    yield item

        return _flatten()

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"
class ListSlatesPager:
    """Pager that flattens ``list_slates`` results.

    Wraps the first
    :class:`google.cloud.video.stitcher_v1.types.ListSlatesResponse` and,
    while iterating, keeps issuing ``ListSlates`` requests as long as a
    ``next_page_token`` is present, yielding every item of the ``slates``
    field. Attribute access is proxied to the most recently fetched
    response.
    """

    def __init__(
        self,
        method: Callable[..., video_stitcher_service.ListSlatesResponse],
        request: video_stitcher_service.ListSlatesRequest,
        response: video_stitcher_service.ListSlatesResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The API method that produced ``response``;
                reused to fetch follow-up pages.
            request (google.cloud.video.stitcher_v1.types.ListSlatesRequest):
                The initial request object.
            response (google.cloud.video.stitcher_v1.types.ListSlatesResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        # Copy the request so page_token mutations stay local to this pager.
        self._request = video_stitcher_service.ListSlatesRequest(request)
        self._method = method
        self._metadata = metadata
        self._response = response

    def __getattr__(self, name: str) -> Any:
        # Proxy unknown attribute lookups to the latest response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[video_stitcher_service.ListSlatesResponse]:
        """Yield each response page, fetching the next while tokens remain."""
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __iter__(self) -> Iterator[slates.Slate]:
        for page in self.pages:
            for item in page.slates:
                yield item

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"
class ListSlatesAsyncPager:
    """Async pager that flattens ``list_slates`` results.

    Wraps the first
    :class:`google.cloud.video.stitcher_v1.types.ListSlatesResponse` and,
    while iterating, keeps issuing ``ListSlates`` requests as long as a
    ``next_page_token`` is present, yielding every item of the ``slates``
    field. Attribute access is proxied to the most recently fetched
    response.
    """

    def __init__(
        self,
        method: Callable[..., Awaitable[video_stitcher_service.ListSlatesResponse]],
        request: video_stitcher_service.ListSlatesRequest,
        response: video_stitcher_service.ListSlatesResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The API method that produced ``response``;
                reused to fetch follow-up pages.
            request (google.cloud.video.stitcher_v1.types.ListSlatesRequest):
                The initial request object.
            response (google.cloud.video.stitcher_v1.types.ListSlatesResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        # Copy the request so page_token mutations stay local to this pager.
        self._request = video_stitcher_service.ListSlatesRequest(request)
        self._method = method
        self._metadata = metadata
        self._response = response

    def __getattr__(self, name: str) -> Any:
        # Proxy unknown attribute lookups to the latest response.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterator[video_stitcher_service.ListSlatesResponse]:
        """Yield each response page, fetching the next while tokens remain."""
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = await self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __aiter__(self) -> AsyncIterator[slates.Slate]:
        async def _flatten():
            async for page in self.pages:
                for item in page.slates:
                    yield item

        return _flatten()

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"
| 39.658892 | 92 | 0.679115 |
032de80813ddb4573ea7ba2a4833d3972fd2a66f | 7,067 | py | Python | appengine_module/gae_ts_mon/test/instrument_webapp2_test.py | allaparthi/monorail | e18645fc1b952a5a6ff5f06e0c740d75f1904473 | [
"BSD-3-Clause"
] | null | null | null | appengine_module/gae_ts_mon/test/instrument_webapp2_test.py | allaparthi/monorail | e18645fc1b952a5a6ff5f06e0c740d75f1904473 | [
"BSD-3-Clause"
] | null | null | null | appengine_module/gae_ts_mon/test/instrument_webapp2_test.py | allaparthi/monorail | e18645fc1b952a5a6ff5f06e0c740d75f1904473 | [
"BSD-3-Clause"
] | null | null | null | # Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import mock
import webapp2
from .test_support import test_case
from google.appengine.api.runtime import runtime
from infra_libs.ts_mon import config
from infra_libs.ts_mon import instrument_webapp2
from infra_libs.ts_mon.common import http_metrics
from infra_libs.ts_mon.common import interface
from infra_libs.ts_mon.common import targets
class InstrumentWebapp2Test(test_case.TestCase):
  """Tests for instrument_webapp2.instrument() on webapp2 WSGI apps.

  Each test builds a minimal webapp2 application, instruments it, issues a
  fake request through app.get_response(), and then checks the ts_mon HTTP
  metrics the instrumentation should have recorded.
  """

  def setUp(self):
    super(InstrumentWebapp2Test, self).setUp()
    config.reset_for_unittest()
    # Fake clock state: fake_time() starts at 42.0 and advances 3.0 seconds
    # per call, so instrumented requests appear to take 3000 ms.
    self.next_time = 42.0
    self.time_increment = 3.0

  def fake_time(self):
    """Deterministic stand-in for the time source; advances 3s per call."""
    ret = self.next_time
    self.next_time += self.time_increment
    return ret

  def test_instrument_webapp2_invoked_multiple_times(self):
    """Instrumenting the same app twice must not double-count metrics."""
    class Handler(webapp2.RequestHandler):
      def get(self):
        self.response.write('success!')
    app = webapp2.WSGIApplication([('/', Handler)])
    self.assertFalse(instrument_webapp2._is_instrumented(app))
    instrument_webapp2.instrument(app, time_fn=self.fake_time)
    self.assertTrue(instrument_webapp2._is_instrumented(app))
    instrument_webapp2.instrument(app, time_fn=self.fake_time)
    self.assertTrue(instrument_webapp2._is_instrumented(app))
    # trigger a test page handler and check if the value of the HTTP metric
    # didn't increase twice.
    app.get_response('/')
    fields = {'name': '^/$', 'status': 200, 'is_robot': False}
    self.assertEqual(1, http_metrics.server_response_status.get(fields))

  def test_success(self):
    """A 200 response records status, duration and response size."""
    class Handler(webapp2.RequestHandler):
      def get(self):
        self.response.write('success!')
    app = webapp2.WSGIApplication([('/', Handler)])
    instrument_webapp2.instrument(app, time_fn=self.fake_time)
    app.get_response('/')
    fields = {'name': '^/$', 'status': 200, 'is_robot': False}
    self.assertEqual(1, http_metrics.server_response_status.get(fields))
    # The fake clock advances 3.0s per tick, i.e. at least 3000 ms.
    self.assertLessEqual(3000, http_metrics.server_durations.get(fields).sum)
    self.assertEqual(
        len('success!'),
        http_metrics.server_response_bytes.get(fields).sum)

  def test_abort(self):
    """self.abort(...) is reported with the aborted status code."""
    class Handler(webapp2.RequestHandler):
      def get(self):
        self.abort(417)
    app = webapp2.WSGIApplication([('/', Handler)])
    instrument_webapp2.instrument(app)
    app.get_response('/')
    fields = {'name': '^/$', 'status': 417, 'is_robot': False}
    self.assertEqual(1, http_metrics.server_response_status.get(fields))

  def test_set_status(self):
    """An explicitly set status code is reported."""
    class Handler(webapp2.RequestHandler):
      def get(self):
        self.response.set_status(418)
    app = webapp2.WSGIApplication([('/', Handler)])
    instrument_webapp2.instrument(app)
    app.get_response('/')
    fields = {'name': '^/$', 'status': 418, 'is_robot': False}
    self.assertEqual(1, http_metrics.server_response_status.get(fields))

  def test_exception(self):
    """An uncaught handler exception is reported as a 500."""
    class Handler(webapp2.RequestHandler):
      def get(self):
        raise ValueError
    app = webapp2.WSGIApplication([('/', Handler)])
    instrument_webapp2.instrument(app)
    app.get_response('/')
    fields = {'name': '^/$', 'status': 500, 'is_robot': False}
    self.assertEqual(1, http_metrics.server_response_status.get(fields))

  def test_http_exception(self):
    """A webapp2 HTTP exception is reported with its own status code."""
    class Handler(webapp2.RequestHandler):
      def get(self):
        raise webapp2.exc.HTTPExpectationFailed()
    app = webapp2.WSGIApplication([('/', Handler)])
    instrument_webapp2.instrument(app)
    app.get_response('/')
    fields = {'name': '^/$', 'status': 417, 'is_robot': False}
    self.assertEqual(1, http_metrics.server_response_status.get(fields))

  def test_return_response(self):
    """A Response object returned (not written) by the handler is tracked."""
    class Handler(webapp2.RequestHandler):
      def get(self):
        ret = webapp2.Response()
        ret.set_status(418)
        return ret
    app = webapp2.WSGIApplication([('/', Handler)])
    instrument_webapp2.instrument(app)
    app.get_response('/')
    fields = {'name': '^/$', 'status': 418, 'is_robot': False}
    self.assertEqual(1, http_metrics.server_response_status.get(fields))

  def test_robot(self):
    """A bot User-Agent sets the is_robot metric field."""
    class Handler(webapp2.RequestHandler):
      def get(self):
        ret = webapp2.Response()
        ret.set_status(200)
        return ret
    app = webapp2.WSGIApplication([('/', Handler)])
    instrument_webapp2.instrument(app)
    app.get_response('/', user_agent='GoogleBot')
    fields = {'name': '^/$', 'status': 200, 'is_robot': True}
    self.assertEqual(1, http_metrics.server_response_status.get(fields))

  def test_missing_response_content_length(self):
    """Without Content-Length, the response-bytes metric stays unset."""
    class Handler(webapp2.RequestHandler):
      def get(self):
        del self.response.headers['content-length']
    app = webapp2.WSGIApplication([('/', Handler)])
    instrument_webapp2.instrument(app)
    app.get_response('/')
    fields = {'name': '^/$', 'status': 200, 'is_robot': False}
    self.assertEqual(1, http_metrics.server_response_status.get(fields))
    self.assertIsNone(http_metrics.server_response_bytes.get(fields))

  def test_not_found(self):
    """Unrouted requests are recorded with an empty route name and 404."""
    app = webapp2.WSGIApplication([])
    instrument_webapp2.instrument(app)
    app.get_response('/notfound')
    fields = {'name': '', 'status': 404, 'is_robot': False}
    self.assertEqual(1, http_metrics.server_response_status.get(fields))

  def test_post(self):
    """POST requests record the request body size."""
    class Handler(webapp2.RequestHandler):
      def post(self):
        pass
    app = webapp2.WSGIApplication([('/', Handler)])
    instrument_webapp2.instrument(app)
    app.get_response('/', POST='foo')
    fields = {'name': '^/$', 'status': 200, 'is_robot': False}
    self.assertEqual(1, http_metrics.server_response_status.get(fields))
    self.assertEqual(
        len('foo'),
        http_metrics.server_request_bytes.get(fields).sum)
class TaskNumAssignerHandlerTest(test_case.TestCase):
  """Tests for the /internal/cron/ts_mon/send cron endpoint."""

  def setUp(self):
    super(TaskNumAssignerHandlerTest, self).setUp()
    config.reset_for_unittest()
    target = targets.TaskTarget('test_service', 'test_job', 'test_region',
                                'test_host')
    self.mock_state = interface.State(target=target)

    # Workaround the fact that 'system' module is not mocked.
    class _memory_usage(object):
      def current(self):
        return 10.0

    # SERVER_SOFTWARE='PRODUCTION' simulates the deployed App Engine
    # runtime for code that branches on it.
    env = os.environ.copy()
    env['SERVER_SOFTWARE'] = 'PRODUCTION'
    self.mock(runtime, 'memory_usage', _memory_usage)
    self.mock(os, 'environ', env)

    self.app = instrument_webapp2.create_app()
    instrument_webapp2.instrument(self.app)

  def tearDown(self):
    mock.patch.stopall()
    super(TaskNumAssignerHandlerTest, self).tearDown()

  def test_success(self):
    """Requests carrying the X-Appengine-Cron header get a 200."""
    response = self.app.get_response(
        '/internal/cron/ts_mon/send', headers=[('X-Appengine-Cron', 'true')])
    self.assertEqual(response.status_int, 200)

  def test_unauthorized(self):
    """Requests without the cron header are rejected with 403."""
    response = self.app.get_response('/internal/cron/ts_mon/send')
    self.assertEqual(response.status_int, 403)
| 28.611336 | 77 | 0.691807 |
384aaec5c57ce4946d2f54b7b21ecea39e6b4298 | 1,777 | py | Python | setup.py | tamland/mopidy | 7d5117c299ec33b66b67b906a9971bcbc77c3133 | [
"Apache-2.0"
] | 1 | 2018-05-12T21:28:49.000Z | 2018-05-12T21:28:49.000Z | setup.py | tamland/mopidy | 7d5117c299ec33b66b67b906a9971bcbc77c3133 | [
"Apache-2.0"
] | null | null | null | setup.py | tamland/mopidy | 7d5117c299ec33b66b67b906a9971bcbc77c3133 | [
"Apache-2.0"
] | null | null | null | from __future__ import absolute_import, unicode_literals
import re
from setuptools import find_packages, setup
def get_version(filename):
    """Return the package version parsed from *filename*.

    Scans the file for dunder assignments of the form
    ``__name__ = 'value'`` (e.g. ``__version__ = '1.2.3'``) without
    importing the module, so setup.py works before dependencies are
    installed.

    Raises:
        KeyError: if the file contains no ``__version__`` assignment.
    """
    # Use a context manager so the file handle is closed deterministically
    # (the previous open(...).read() relied on garbage collection).
    with open(filename) as init_file:
        init_py = init_file.read()
    metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", init_py))
    return metadata['version']
# Read the long description up front with a context manager so the file
# handle is closed deterministically (open(...).read() inside the call
# relied on garbage collection to close it).
with open('README.rst') as readme_file:
    _long_description = readme_file.read()

setup(
    name='Mopidy',
    version=get_version('mopidy/__init__.py'),
    url='http://www.mopidy.com/',
    license='Apache License, Version 2.0',
    author='Stein Magnus Jodal',
    author_email='stein.magnus@jodal.no',
    description='Music server with MPD and Spotify support',
    long_description=_long_description,
    packages=find_packages(exclude=['tests', 'tests.*']),
    zip_safe=False,
    include_package_data=True,
    install_requires=[
        'setuptools',
        'Pykka >= 1.1',
        'tornado >= 2.3',
    ],
    extras_require={'http': []},
    test_suite='nose.collector',
    tests_require=[
        'nose',
        'mock >= 1.0',
    ],
    entry_points={
        'console_scripts': [
            'mopidy = mopidy.__main__:main',
        ],
        'mopidy.ext': [
            'http = mopidy.http:Extension',
            'local = mopidy.local:Extension',
            'mpd = mopidy.mpd:Extension',
            'softwaremixer = mopidy.softwaremixer:Extension',
            'stream = mopidy.stream:Extension',
        ],
    },
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: No Input/Output (Daemon)',
        'Intended Audience :: End Users/Desktop',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: MacOS :: MacOS X',
        'Operating System :: POSIX :: Linux',
        'Programming Language :: Python :: 2.7',
        'Topic :: Multimedia :: Sound/Audio :: Players',
    ],
)
| 29.616667 | 68 | 0.588633 |
c32040e0e0304e30fc3ce5b6169122a1fa9cf8ff | 85 | py | Python | apps.py | joannaksk/igirl | edb9220a2905bc744b78ce5c0fe5a40a51189894 | [
"MIT"
] | null | null | null | apps.py | joannaksk/igirl | edb9220a2905bc744b78ce5c0fe5a40a51189894 | [
"MIT"
] | null | null | null | apps.py | joannaksk/igirl | edb9220a2905bc744b78ce5c0fe5a40a51189894 | [
"MIT"
] | null | null | null | from django.apps import AppConfig
class IgirlConfig(AppConfig):
    """Django application configuration for the ``igirl`` app."""

    # Dotted module path Django uses to register this application.
    name = 'igirl'
| 14.166667 | 33 | 0.741176 |
572c6139cece897a3055248ca03c5b66ca402ef2 | 98 | py | Python | setup.py | datakurre/robotframework-seleniumlibrary | 8fdec6b73c66eef7f95ad4ba16b3912cb3f19331 | [
"BSD-3-Clause"
] | 10 | 2019-08-13T02:15:45.000Z | 2020-12-09T15:49:36.000Z | setup.py | datakurre/robotframework-seleniumlibrary | 8fdec6b73c66eef7f95ad4ba16b3912cb3f19331 | [
"BSD-3-Clause"
] | 9 | 2019-01-06T18:36:58.000Z | 2021-04-02T05:57:36.000Z | setup.py | datakurre/robotframework-seleniumlibrary | 8fdec6b73c66eef7f95ad4ba16b3912cb3f19331 | [
"BSD-3-Clause"
] | 2 | 2019-01-06T18:38:48.000Z | 2021-04-02T06:01:33.000Z | # -*- coding: utf-8 -*-
# One statement per line instead of semicolon-joined statements, which also
# removes the need for the `# noqa: E702` lint suppression.
from setuptools import setup

# All Python sources for this distribution live under the src/ directory.
setup(package_dir={'': 'src'})
16477018e71401c1c1c6034339197578b9db9a75 | 849 | py | Python | {{cookiecutter.project_name}}/server/settings/__init__.py | mikebirdgeneau/wemake-django-template | 885a091db9e7c58c0706087a86dff253d41cd000 | [
"MIT"
] | null | null | null | {{cookiecutter.project_name}}/server/settings/__init__.py | mikebirdgeneau/wemake-django-template | 885a091db9e7c58c0706087a86dff253d41cd000 | [
"MIT"
] | 24 | 2021-01-04T12:16:40.000Z | 2021-07-23T12:17:34.000Z | {{cookiecutter.project_name}}/server/settings/__init__.py | mikebirdgeneau/wemake-django-template | 885a091db9e7c58c0706087a86dff253d41cd000 | [
"MIT"
] | null | null | null | """
This is a django-split-settings main file.
For more information read this:
https://github.com/sobolevn/django-split-settings
https://sobolevn.me/2017/04/managing-djangos-settings
To change settings file:
`DJANGO_ENV=production python manage.py runserver`
"""
from os import environ
from split_settings.tools import include, optional
# Pick the settings environment from the DJANGO_ENV variable; setdefault
# both seeds the default and returns the effective value in one step.
_ENV = environ.setdefault('DJANGO_ENV', 'development')

_base_settings = (
    'components/common.py',
    'components/logging.py',
    'components/csp.py',
    'components/caches.py',

    # You can even use glob:
    # 'components/*.py'

    # Select the right env:
    'environments/{0}.py'.format(_ENV),

    # Optionally override some settings:
    optional('environments/local.py'),
)

# Merge every settings fragment into this module's namespace:
include(*_base_settings)
| 22.342105 | 53 | 0.727915 |
958042d22b18881a20b288f2a972f3308a546b5e | 631 | py | Python | hamal/hamal/conf/vmware_rsa_license.py | JackDan9/hamal | 965be9db066209300c52f0cf17d251290d8901b7 | [
"MIT"
] | 3 | 2020-06-12T13:03:46.000Z | 2020-08-06T11:25:46.000Z | hamal/hamal/conf/vmware_rsa_license.py | JackDan9/hamal | 965be9db066209300c52f0cf17d251290d8901b7 | [
"MIT"
] | null | null | null | hamal/hamal/conf/vmware_rsa_license.py | JackDan9/hamal | 965be9db066209300c52f0cf17d251290d8901b7 | [
"MIT"
] | null | null | null | # Copyright 2020 Hamal, Inc.
from oslo_config import cfg

# Option group that namespaces these settings under [vmware_rsa_license]
# in the configuration file.
vmware_license_group = cfg.OptGroup(name='vmware_rsa_license',
                                    title='VMware License Options')

# Paths to the RSA private/public key PEM files; defaults live under
# /etc/hamal and can be overridden in the [vmware_rsa_license] section.
VMWARE_LICENSE_ALL_OPTS = [
    cfg.StrOpt('private_key_file',
               default='/etc/hamal/hamal_private.pem'),
    cfg.StrOpt('public_key_file',
               default='/etc/hamal/hamal_public.pem'),
]
def register_opts(conf):
    """Register the vmware_rsa_license group, then its options, on *conf*."""
    conf.register_group(vmware_license_group)
    conf.register_opts(VMWARE_LICENSE_ALL_OPTS, group=vmware_license_group)
def list_opts():
    """Return the group-to-options mapping for oslo.config tooling."""
    return { vmware_license_group: VMWARE_LICENSE_ALL_OPTS }
| 27.434783 | 75 | 0.697306 |
50c22dea26b45ee3872afe2bd6c49a073189e1f8 | 1,806 | py | Python | webservice/websocket/web_socket_connection_handler.py | PedalController/PedalPiREST | aa9418d44f2f5dbec604753a03bf8a74057c627c | [
"Apache-2.0"
] | null | null | null | webservice/websocket/web_socket_connection_handler.py | PedalController/PedalPiREST | aa9418d44f2f5dbec604753a03bf8a74057c627c | [
"Apache-2.0"
] | 42 | 2016-07-04T11:17:54.000Z | 2018-03-18T18:36:09.000Z | webservice/websocket/web_socket_connection_handler.py | PedalController/PedalPiREST | aa9418d44f2f5dbec604753a03bf8a74057c627c | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 SrMouraSilva
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import uuid
import logging
from webservice.websocket.web_socket_connections import WebSocketConnections
from webservice.websocket.websocket_connection_observer import WebSocketConnectionObserver
from tornado import websocket
class WebSocketConnectionHandler(websocket.WebSocketHandler):
    """Tornado WebSocket handler that pushes webservice updates to clients.

    Each accepted connection gets a random UUID token, is registered in
    ``WebSocketConnections``, and has an observer attached to the
    webservice. The socket is push-only: inbound messages are rejected.
    """

    # Injected by ``initialize``; class-level default keeps attribute
    # access safe before initialization.
    webservice = None

    def initialize(self, app, webservice):
        """Store the webservice reference supplied by the application.

        ``app`` is accepted for handler-registration compatibility but
        is not used here.
        """
        self.webservice = webservice

    def check_origin(self, origin):
        """Accept connections from any origin (origin check disabled)."""
        # return bool(re.match(r'^.*?\.mydomain\.com', origin))
        return True

    def open(self):
        """Register the new connection and send its identification token."""
        token = str(uuid.uuid4())
        logging.info('WebSocket opened - Token {}'.format(token))

        observer = WebSocketConnectionObserver(self)
        self.webservice.register_observer(observer)
        WebSocketConnections.register(token, self, observer)

        self.write_message(json.dumps({'type': 'TOKEN', 'value': token}))

    def on_message(self, message):
        """Reject inbound data; clients must use the REST API instead."""
        self.write_message(json.dumps({'error': 'Use REST api for send data'}))

    def on_close(self):
        """Unregister the connection and detach its observer."""
        token, observer = WebSocketConnections.unregister(self)
        self.webservice.unregister_observer(observer)
        logging.info('WebSocket closed - Token {}'.format(token))
0433507d3af87df3bf65423ed57f456f96272693 | 3,180 | py | Python | tsx/models/classifier/base.py | MatthiasJakobs/tsx | 8a686ffd0af2f9f826d9ce11349e0fa0e883e897 | [
"MIT"
] | null | null | null | tsx/models/classifier/base.py | MatthiasJakobs/tsx | 8a686ffd0af2f9f826d9ce11349e0fa0e883e897 | [
"MIT"
] | null | null | null | tsx/models/classifier/base.py | MatthiasJakobs/tsx | 8a686ffd0af2f9f826d9ce11349e0fa0e883e897 | [
"MIT"
] | null | null | null | import torch
import pickle
import torch.nn as nn
import numpy as np
from os.path import join
from tsx.models import BaseCNN
class BaseClassifier(BaseCNN):
    """Abstract classifier base; subclasses implement the hooks below."""

    def transform(self, X):
        """Map raw input ``X`` to model-ready features (abstract)."""
        raise NotImplementedError()

    def save(self):
        """Persist the model (abstract)."""
        raise NotImplementedError()

    def load(self):
        """Restore the model (abstract)."""
        raise NotImplementedError()

    def inform(self, string):
        """Print ``string`` only when ``self.verbose`` is truthy."""
        if self.verbose:
            print(string)

    def preprocessing(self, X_train, y_train, X_test=None, y_test=None):
        """Prepare train/test data before fitting (abstract)."""
        raise NotImplementedError()
class BasePyTorchClassifier(BaseClassifier):
    """Classifier base class wiring a PyTorch module into BaseClassifier.

    Relies on ``forward`` (from BaseCNN) and the ``preprocessing`` hook
    being provided by subclasses.
    """

    def __init__(self, n_classes=10, epochs=5, batch_size=10, verbose=False, optimizer=torch.optim.Adam, loss=nn.CrossEntropyLoss, learning_rate=1e-3):
        """Store training hyperparameters.

        Arguments:
            n_classes: Number of target classes.
            epochs: Number of passes over the training data in ``fit``.
            batch_size: Mini-batch size for training and evaluation.
            verbose: Enables ``inform`` printing when truthy.
            optimizer: Optimizer *class* (not instance); built in ``fit``.
            loss: Loss *class* (not instance); built in ``fit``.
            learning_rate: Learning rate handed to the optimizer.
        """
        super(BasePyTorchClassifier, self).__init__()
        self.classifier = True
        self.forecaster = False
        self.loss = loss
        self.n_classes = n_classes
        self.optimizer = optimizer
        self.batch_size = batch_size
        self.learning_rate = learning_rate
        self.verbose = verbose
        self.epochs = epochs
        # Set to True once ``fit`` completes.
        self.fitted = False

    def fit(self, X_train, y_train, X_test=None, y_test=None):
        """Train the model, printing per-epoch loss and accuracy.

        Expects X, y to be PyTorch tensors. Test data is optional and is
        only used for the per-epoch accuracy report.
        """
        X_train, y_train, X_test, y_test = self.preprocessing(X_train, y_train, X_test=X_test, y_test=y_test)
        ds = torch.utils.data.TensorDataset(X_train, y_train)
        dl = torch.utils.data.DataLoader(ds, batch_size=self.batch_size, shuffle=True)

        # Stored as classes; instantiate them here.
        loss_fn = self.loss()
        optim = self.optimizer(self.parameters(), lr=self.learning_rate)

        for epoch in range(self.epochs):
            # 1-based epoch number for display only.
            print_epoch = epoch + 1
            # Running total of per-batch loss values for this epoch.
            epoch_loss = 0.0
            for i, (X, y) in enumerate(dl):
                optim.zero_grad()
                prediction = self.forward(X)
                loss = loss_fn(prediction, y)
                loss.backward()
                epoch_loss += loss.item()
                optim.step()

            train_accuracy = self.accuracy(X_train, y_train)
            if X_test is not None and y_test is not None:
                test_accuracy = self.accuracy(X_test, y_test)
                print("Epoch {} train_loss {} train_accuracy {} test_accuracy {}".format(print_epoch, epoch_loss, train_accuracy, test_accuracy))
            else:
                print("Epoch {} train_loss {} train_accuracy {}".format(print_epoch, epoch_loss, train_accuracy))

        self.fitted = True

    # def predict(self, X):
    #     # Expects X to be Pytorch tensors
    #     return self.forward(self.transform(X))

    def accuracy(self, X, y, batch_size=None):
        """Return the fraction of correct argmax predictions on (X, y).

        Expects X, y to be PyTorch tensors. Evaluation is batched to
        bound memory use; ``batch_size`` defaults to ``self.batch_size``.
        """
        number_y = len(y)
        if batch_size is None:
            batch_size = self.batch_size

        ds = torch.utils.data.TensorDataset(X, y)
        dl = torch.utils.data.DataLoader(ds, batch_size=batch_size, shuffle=False)

        running_correct = 0
        for i, (X, y) in enumerate(dl):
            prediction = self.forward(X)
            # Predicted class = index of the maximum logit.
            prediction = torch.argmax(prediction, dim=-1)
            running_correct += torch.sum((prediction == y).float())

        return running_correct / number_y
019ff1f26130342562e267aa2539db14a60f62d5 | 603 | py | Python | DbbenchTools/__init__.py | AaronHirsch/dbbench-tools | 987dbdba7231d70056e585dd4ee3906d64786d65 | [
"Apache-2.0"
] | 2 | 2016-08-11T12:22:24.000Z | 2020-04-13T15:09:03.000Z | DbbenchTools/__init__.py | AaronHirsch/dbbench-tools | 987dbdba7231d70056e585dd4ee3906d64786d65 | [
"Apache-2.0"
] | 1 | 2019-01-12T18:41:03.000Z | 2019-01-12T18:41:03.000Z | DbbenchTools/__init__.py | AaronHirsch/dbbench-tools | 987dbdba7231d70056e585dd4ee3906d64786d65 | [
"Apache-2.0"
] | 2 | 2019-01-12T11:34:22.000Z | 2019-01-16T03:07:00.000Z | #
# Copyright (c) 2016 by MemSQL. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
| 37.6875 | 74 | 0.754561 |
ea3bf93d047a68730f8020b9b8c8d7b3f14b2229 | 99 | py | Python | stakeholders/apps.py | banillie/project_tracker | 1076b04a8ca6cb0205d8775a7b1e15e225b62a92 | [
"MIT"
] | null | null | null | stakeholders/apps.py | banillie/project_tracker | 1076b04a8ca6cb0205d8775a7b1e15e225b62a92 | [
"MIT"
] | 35 | 2021-09-24T15:02:21.000Z | 2022-03-28T14:00:26.000Z | stakeholders/apps.py | banillie/project_tracker | 1076b04a8ca6cb0205d8775a7b1e15e225b62a92 | [
"MIT"
] | null | null | null | from django.apps import AppConfig
class StakeholdersConfig(AppConfig):
    """Django application configuration for the ``stakeholders`` app."""

    name = 'stakeholders'
| 16.5 | 36 | 0.777778 |
7a7ee202af5b41ff494a6ae84291f7f7e893cac4 | 1,384 | py | Python | tests/test_literal.py | zhPavel/dataclass_factory | 65820601f0259be60e7dd907207b5d29fb05e2b1 | [
"Apache-2.0"
] | 189 | 2018-09-26T07:52:50.000Z | 2022-03-25T14:09:54.000Z | tests/test_literal.py | zhPavel/dataclass_factory | 65820601f0259be60e7dd907207b5d29fb05e2b1 | [
"Apache-2.0"
] | 102 | 2018-09-15T08:54:37.000Z | 2022-03-14T09:18:42.000Z | tests/test_literal.py | zhPavel/dataclass_factory | 65820601f0259be60e7dd907207b5d29fb05e2b1 | [
"Apache-2.0"
] | 35 | 2018-09-26T08:13:07.000Z | 2022-03-25T18:08:01.000Z | import sys
from typing import Any
from unittest import TestCase
from nose2.tools import params # type: ignore
from typing_extensions import Literal as CompatLiteral
from dataclass_factory import Factory
# Literal implementations under test: always the typing_extensions
# backport, plus the stdlib typing.Literal on Python 3.8+.
LITERALS: Any = [CompatLiteral]
if sys.version_info >= (3, 8):
    from typing import Literal as PyLiteral
    LITERALS.append(PyLiteral)
class TestLiteral(TestCase):
    """Check Factory load/dump behavior for every Literal implementation."""

    def setUp(self) -> None:
        self.factory = Factory()

    @params(*LITERALS)
    def test_literal_fail(self, literal):
        """Loading a value outside the declared literal choices raises."""
        abc_literal = literal["a", "b", "c"]
        single_one = literal[1]
        for value, target in (("d", abc_literal), (1.0, single_one)):
            with self.assertRaises(ValueError):
                self.factory.load(value, target)

    @params(*LITERALS)
    def test_literal(self, literal):
        """Declared literal values round-trip; dump passes values through."""
        abc_literal = literal["a", "b", "c"]
        single_one = literal[1]
        for letter in ("a", "b", "c"):
            self.assertEqual(self.factory.load(letter, abc_literal), letter)
        self.assertEqual(self.factory.load(1, single_one), 1)
        # dump is permissive: even "Z" (not a declared choice) passes through.
        for letter in ("a", "b", "c", "Z"):
            self.assertEqual(self.factory.dump(letter, abc_literal), letter)
        self.assertEqual(self.factory.dump(1, single_one), 1)
624a3994b56c8f7d11c67c6704d84c97333b74e1 | 3,664 | py | Python | Qubie_code/python/block.py | OldETC/Bipedal_Qubie | a789ea84ce83c6e135bb55d48fa63464ed958632 | [
"Apache-2.0"
] | null | null | null | Qubie_code/python/block.py | OldETC/Bipedal_Qubie | a789ea84ce83c6e135bb55d48fa63464ed958632 | [
"Apache-2.0"
] | null | null | null | Qubie_code/python/block.py | OldETC/Bipedal_Qubie | a789ea84ce83c6e135bb55d48fa63464ed958632 | [
"Apache-2.0"
] | null | null | null | """
This demo will draw a few rectangles onto the screen along with some text
on top of that.
This example is for use on (Linux) computers that are using CPython with
Adafruit Blinka to support CircuitPython libraries. CircuitPython does
not support PIL/pillow (python imaging library)!
Author(s): Melissa LeBlanc-Williams for Adafruit Industries
"""
import digitalio
import board
from PIL import Image, ImageDraw, ImageFont
import adafruit_rgb_display.ili9341 as ili9341
import time
#import adafruit_rgb_display.st7789 as st7789 # pylint: disable=unused-import
#import adafruit_rgb_display.hx8357 as hx8357 # pylint: disable=unused-import
#import adafruit_rgb_display.st7735 as st7735 # pylint: disable=unused-import
#import adafruit_rgb_display.ssd1351 as ssd1351 # pylint: disable=unused-import
import adafruit_rgb_display.ssd1331 as ssd1331 # pylint: disable=unused-import
# First define some constants to allow easy resizing of shapes.
BORDER = 20
FONTSIZE = 24
# Configuration for CS and DC pins (these are PiTFT defaults):
cs_pin = digitalio.DigitalInOut(board.CE0)
dc_pin = digitalio.DigitalInOut(board.D25)
reset_pin = digitalio.DigitalInOut(board.D24)
# Config for display baudrate (default max is 24mhz):
BAUDRATE = 24000000
# Setup SPI bus using hardware SPI:
spi = board.SPI()
# pylint: disable=line-too-long
# Create the display (uncomment one of the alternatives below instead of
# the ILI9341 constructor if using a different panel):
# disp = st7789.ST7789(spi, rotation=90, # 2.0" ST7789
# disp = st7789.ST7789(spi, height=240, y_offset=80, rotation=180, # 1.3", 1.54" ST7789
# disp = st7789.ST7789(spi, rotation=90, width=135, height=240, x_offset=53, y_offset=40, # 1.14" ST7789
# disp = hx8357.HX8357(spi, rotation=180, # 3.5" HX8357
# disp = st7735.ST7735R(spi, rotation=90, # 1.8" ST7735R
# disp = st7735.ST7735R(spi, rotation=270, height=128, x_offset=2, y_offset=3, # 1.44" ST7735R
# disp = st7735.ST7735R(spi, rotation=90, bgr=True, # 0.96" MiniTFT ST7735R
# disp = ssd1351.SSD1351(spi, rotation=180, # 1.5" SSD1351
# disp = ssd1351.SSD1351(spi, height=96, y_offset=32, rotation=180, # 1.27" SSD1351
# disp = ssd1331.SSD1331(spi, rotation=180, # 0.96" SSD1331
disp = ili9341.ILI9341(
    spi,
    rotation=180,  # 2.2", 2.4", 2.8", 3.2" ILI9341
    cs=cs_pin,
    dc=dc_pin,
    rst=reset_pin,
    baudrate=BAUDRATE,
)
# pylint: enable=line-too-long
# Create blank image for drawing.
# Make sure to create image with mode 'RGB' for full color.
if disp.rotation % 180 == 90:
    height = disp.width  # we swap height/width to rotate it to landscape!
    width = disp.height
else:
    width = disp.width  # orientation already matches; no swap needed
    height = disp.height
# The print() calls below trace progress when running headless.
print ('width=', width, 'height=',height,'\n')
image = Image.new("RGB", (width, height))
print ('image created\n')
# Get drawing object to draw on image.
draw = ImageDraw.Draw(image)
print ('draw\n')
# Draw a green filled box as the background
draw.rectangle((0, 0, width, height), fill=(0, 255, 0))
print ('rectangle\n')
disp.image(image)
print ('disp\n')
# Draw a smaller inner purple rectangle, inset by BORDER pixels.
draw.rectangle(
    (BORDER, BORDER, width - BORDER - 1, height - BORDER - 1), fill=(170, 0, 136)
)
print ('small\n')
# Load a TTF Font
font = ImageFont.truetype("/usr/share/fonts/truetype/dejavu/DejaVuSans.ttf", FONTSIZE)
print ('font\n')
# Draw Some Text, centered on the display.
text = "Hello World!"
(font_width, font_height) = font.getsize(text)
draw.text(
    (width // 2 - font_width // 2, height // 2 - font_height // 2),
    text,
    font=font,
    fill=(255, 255, 0),
)
# Display image.
disp.image(image)
| 35.921569 | 104 | 0.691048 |
cce1f3bc8987c0aeb57da02b5be6ace2539fffa5 | 179 | py | Python | perfdaily/magma.py | bochun/perfrunner | e215c73240381cf82fddc40856f560369c9b75a8 | [
"Apache-2.0"
] | 18 | 2015-10-28T23:12:07.000Z | 2022-01-04T14:23:37.000Z | perfdaily/magma.py | bochun/perfrunner | e215c73240381cf82fddc40856f560369c9b75a8 | [
"Apache-2.0"
] | 11 | 2019-03-19T12:02:31.000Z | 2022-02-11T03:39:44.000Z | perfdaily/magma.py | bochun/perfrunner | e215c73240381cf82fddc40856f560369c9b75a8 | [
"Apache-2.0"
] | 39 | 2015-06-07T09:17:16.000Z | 2022-03-06T20:32:01.000Z | from perfdaily import DailyTest
from perfrunner.tests.magma import MagmaBenchmarkTest as _MagmaBenchmarkTest
class MagmaBenchmarkTest(DailyTest, _MagmaBenchmarkTest):
    """Daily-run variant of the Magma benchmark test; adds no behavior."""
    pass
| 22.375 | 76 | 0.849162 |
27d2cd7c53326c126220e5fabb3b4c17d1a77160 | 3,073 | py | Python | python/libsunnet/snBase.py | Tyill/skynet | 1a34fc27b523345603d4fb88570f44fbbbef1b80 | [
"MIT"
] | 64 | 2018-10-14T16:36:05.000Z | 2021-03-22T10:20:07.000Z | python/libsunnet/snBase.py | Tyill/sunnet | 1a34fc27b523345603d4fb88570f44fbbbef1b80 | [
"MIT"
] | 4 | 2018-10-15T05:52:01.000Z | 2020-04-01T14:41:54.000Z | python/libsunnet/snBase.py | Tyill/sunnet | 1a34fc27b523345603d4fb88570f44fbbbef1b80 | [
"MIT"
] | 20 | 2018-10-14T18:29:15.000Z | 2020-10-22T23:03:59.000Z | #
# sunnet project
# Copyright (C) 2018 by Contributors <https:#github.com/Tyill/sunnet>
#
# This code is licensed under the MIT License.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files(the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions :
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import sys
import ctypes
# type definitions
def snFloat_p(x):
    """Cast ``x`` (a ctypes pointer/buffer) to a ``POINTER(c_float)``."""
    # A named ``def`` replaces the PEP 8-discouraged lambda assignment
    # (E731); the callable interface is unchanged.
    return ctypes.cast(x, ctypes.POINTER(ctypes.c_float))
def c_str(string: str) -> ctypes.c_char_p:
    """Build a ctypes ``char *`` value from a Python string.

    On Python 3 the text is UTF-8 encoded first; on Python 2 the
    string is passed through unchanged.
    """
    if sys.version_info[0] > 2:
        raw = string.encode('utf-8')
    else:
        raw = string
    return ctypes.c_char_p(raw)
class snLSize(ctypes.Structure):
    """ctypes structure describing a layer's size for the native library."""
    # Field order/types form the ABI contract with the C side; do not
    # reorder. w/h/ch/bsz presumably mean width, height, channels and
    # batch size -- confirm against the C headers.
    _fields_ = [('w', ctypes.c_size_t),
                ('h', ctypes.c_size_t),
                ('ch', ctypes.c_size_t),
                ('bsz', ctypes.c_size_t)]
class snBNorm(ctypes.Structure):
    """ctypes structure of batch-normalization parameter pointers."""
    # Each field is a float*; layout must match the native struct.
    # Field names follow the C side (incl. the 'varce'/'schift' spellings).
    _fields_ = [('mean', ctypes.POINTER(ctypes.c_float)),
                ('varce', ctypes.POINTER(ctypes.c_float)),
                ('scale', ctypes.POINTER(ctypes.c_float)),
                ('schift', ctypes.POINTER(ctypes.c_float))]
snErrCBack = ctypes.CFUNCTYPE(None, ctypes.c_char_p, ctypes.c_void_p)
snUserCBack = ctypes.CFUNCTYPE(None,
ctypes.c_char_p, # name user cback
ctypes.c_char_p, # name node
ctypes.c_bool, # current action forward(true) or backward(false)
snLSize, # input layer size - receive from prev node
ctypes.POINTER(ctypes.c_float), # input layer - receive from prev node
ctypes.POINTER(snLSize), # output layer size - send to next node
ctypes.POINTER(ctypes.POINTER(ctypes.c_float)), # output layer - send to next node
ctypes.c_void_p # aux used data
) | 48.015625 | 128 | 0.602343 |
228adaec8c749316e489d26f84971066104b35ae | 2,226 | py | Python | examples/ad_manager/v202102/line_item_creative_association_service/get_all_licas.py | timgates42/googleads-python-lib | 86c3d0558fcc30135ff44700a499678c7e69e0c0 | [
"Apache-2.0"
] | 601 | 2015-01-07T09:53:49.000Z | 2022-03-31T17:18:11.000Z | examples/ad_manager/v202102/line_item_creative_association_service/get_all_licas.py | timgates42/googleads-python-lib | 86c3d0558fcc30135ff44700a499678c7e69e0c0 | [
"Apache-2.0"
] | 443 | 2015-01-07T12:16:57.000Z | 2022-03-14T14:46:56.000Z | examples/ad_manager/v202102/line_item_creative_association_service/get_all_licas.py | timgates42/googleads-python-lib | 86c3d0558fcc30135ff44700a499678c7e69e0c0 | [
"Apache-2.0"
] | 1,135 | 2015-01-07T15:51:55.000Z | 2022-03-31T17:18:13.000Z | #!/usr/bin/env python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example gets all line item creative associations.
"""
# Import appropriate modules from the client library.
from googleads import ad_manager
def main(client):
  """Page through and print every line item creative association.

  Arguments:
      client: An initialized AdManagerClient.
  """
  # Initialize appropriate service.
  lica_service = client.GetService(
      'LineItemCreativeAssociationService', version='v202102')

  # Create a statement to select line item creative associations.
  statement = ad_manager.StatementBuilder(version='v202102')

  # Retrieve a small amount of line item creative associations at a time, paging
  # through until all line item creative associations have been retrieved.
  while True:
    response = lica_service.getLineItemCreativeAssociationsByStatement(
        statement.ToStatement())
    if 'results' in response and len(response['results']):
      for lica in response['results']:
        # Print out some information for each line item creative association.
        # A LICA tied to a creative set carries `creativeSetId` instead of
        # an individual `creativeId`.
        if 'creativeSetId' in lica:
          print('LICA with line item ID "%s", creative set ID "%s", and '
                'status "%s" was found.' %
                (lica['lineItemId'], lica['creativeSetId'], lica['status']))
        else:
          print('Line item creative association with line item ID "%d" and '
                'creative ID "%d" was found.\n' %
                (lica['lineItemId'], lica['creativeId']))

      statement.offset += statement.limit
    else:
      break

  # `response` here is the final (empty) page; it still carries the total.
  print('\nNumber of results found: %s' % response['totalResultSetSize'])
if __name__ == '__main__':
# Initialize client object.
ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage()
main(ad_manager_client)
| 38.37931 | 80 | 0.70575 |
6da4e05e49603845d9b3da484acbdf7f2fac1b8f | 3,884 | py | Python | src/masonite/drivers/mail/MailSmtpDriver.py | Abeautifulsnow/masonite | f0ebb5ca05f5d88f21264e1cd0934435bd0a8791 | [
"MIT"
] | 1 | 2022-02-24T06:21:18.000Z | 2022-02-24T06:21:18.000Z | src/masonite/drivers/mail/MailSmtpDriver.py | Abeautifulsnow/masonite | f0ebb5ca05f5d88f21264e1cd0934435bd0a8791 | [
"MIT"
] | 1 | 2020-10-26T12:33:05.000Z | 2020-10-26T12:33:05.000Z | src/masonite/drivers/mail/MailSmtpDriver.py | Abeautifulsnow/masonite | f0ebb5ca05f5d88f21264e1cd0934435bd0a8791 | [
"MIT"
] | null | null | null | """SMTP Driver Module."""
import smtplib
import ssl
import warnings
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from ...contracts.MailContract import MailContract
from ...drivers import BaseMailDriver
class MailSmtpDriver(BaseMailDriver, MailContract):
    """Mail driver that delivers messages over SMTP (plain, SSL, or TLS)."""

    def message(self):
        """Creates a message object for the underlying driver.

        Headers come from the builder state on this driver; HTML and
        plain-text parts are attached when present.

        Returns:
            email.mime.multipart.MIMEMultipart
        """
        message = MIMEMultipart("alternative")
        message["Subject"] = self.message_subject
        message["From"] = self.mail_from_header
        message["To"] = self.mail_to_header
        message["Reply-To"] = self.message_reply_to

        # Attach both mimetypes if they exist.
        if self.html_content:
            message.attach(MIMEText(self.html_content, "html"))
        if self.text_content:
            message.attach(MIMEText(self.text_content, "plain"))

        return message

    def send(self, message=None, message_contents=None):
        """Send the message through SMTP, directly or via the queue.

        Keyword Arguments:
            message -- A prebuilt email message object, or (deprecated)
                an HTML string; built from driver state when omitted.
                (default: {None})
            message_contents -- Deprecated alias of ``message``; passing
                both triggers the assertion below. (default: {None})

        Returns:
            The SMTP send response, or None when pushed to the queue.
        """
        # The old argument name was `message_contents`. users might have used this as keyword argument or as arg.
        assert (
            message is None or message_contents is None
        ), 'using deprecated argument "message_contents" together with the new arg "message" ??'
        message_contents = message or message_contents

        if message_contents and isinstance(message_contents, str):
            # Deprecated path: a raw HTML string was supplied.
            warnings.warn(
                "Passing message_contents to .send() is a deprecated. Please use .text() and .html().",
                category=DeprecationWarning,
                stacklevel=2,
            )
            message = self._get_message_for_send_deprecated(message_contents)
            # The above should be removed once deprecation time period passed.
        elif not message:
            message = self.message()

        self._smtp_connect()

        if self._queue:
            # Defer the actual network send to the configured queue driver.
            from wsgi import container

            from ... import Queue

            container.make(Queue).push(
                self._send_mail,
                args=(self.mail_from_header, self.to_addresses, message),
            )
            return

        return self._send_mail(self.mail_from_header, self.to_addresses, message)

    def _smtp_connect(self):
        """Sets self.smtp to an instance of `smtplib.SMTP`
        and connects using configuration in config.DRIVERS.smtp

        Returns:
            None
        """
        config = self.config.DRIVERS["smtp"]

        if "ssl" in config and config["ssl"] is True:
            self.smtp = smtplib.SMTP_SSL(
                "{0}:{1}".format(config["host"], config["port"])
            )
        else:
            self.smtp = smtplib.SMTP("{0}:{1}".format(config["host"], config["port"]))
            # Check if TLS enabled
            if "tls" in config and config["tls"] is True:
                # Define secure TLS connection
                context = ssl.create_default_context()
                # NOTE(review): disabling hostname checks weakens TLS
                # certificate verification -- confirm this is intentional.
                context.check_hostname = False

                # Check if correct response code for starttls is received from the server
                if self.smtp.starttls(context=context)[0] != 220:
                    raise smtplib.SMTPNotSupportedError(
                        "Server is using untrusted protocol."
                    )

        if config.get("login", True):
            self.smtp.login(config["username"], config["password"])

    def _send_mail(self, *args):
        """Wrapper around sending mail so it can also be used for queues.

        ``args`` is the packed (mail_from_header, to_addresses, message)
        tuple; ``send_message`` itself reads sender/recipients from the
        message headers.
        """
        mail_from_header, to_addresses, message = args
        response = self.smtp.send_message(message)
        self.smtp.quit()
        return response
95921326ee60416d31f5f3be1d56cbd32e5b1f07 | 15,976 | py | Python | src/psiz/datasets/load_dataset.py | greenfieldvision/psiz | 37068530a78e08792e827ee55cf55e627add115e | [
"Apache-2.0"
] | 21 | 2020-04-03T21:10:05.000Z | 2021-12-02T01:31:11.000Z | src/psiz/datasets/load_dataset.py | greenfieldvision/psiz | 37068530a78e08792e827ee55cf55e627add115e | [
"Apache-2.0"
] | 14 | 2020-04-10T00:48:02.000Z | 2021-05-25T18:06:55.000Z | src/psiz/datasets/load_dataset.py | greenfieldvision/psiz | 37068530a78e08792e827ee55cf55e627add115e | [
"Apache-2.0"
] | 4 | 2020-10-13T16:46:14.000Z | 2021-11-10T00:08:47.000Z | # -*- coding: utf-8 -*-
# Copyright 2020 The PsiZ Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Module for loading internally pre-defined datasets.
Functions:
load: Load observations for the requested dataset.
Notes:
The dataset will only be downloaded from the server if it does not
exist locally. If it it already exists locally, no download will
take place. If you would like to force a download, delete the
existing local copy.
"""
import collections
import os
from pathlib import Path
import shutil
import sys
import tarfile
import time
import zipfile
import numpy as np
import six
from six.moves.urllib.error import HTTPError
from six.moves.urllib.error import URLError
from six.moves.urllib.request import urlretrieve
import psiz.catalog
import psiz.trials
def load(
        dataset_name, cache_subdir='datasets', cache_dir=None,
        verbose=0):
    """Load observations and catalog for the requested hosted dataset.

    Arguments:
        dataset_name: The name of the hosted dataset.
        cache_subdir (optional): The subdirectory where downloaded
            datasets are cached.
        cache_dir (optional): The cache directory for PsiZ.
        verbose (optional): Controls the verbosity of printed dataset summary.

    Returns:
        obs: An RankObservations object.
        catalog: A catalog object containing information regarding the
            stimuli used to collect observations.

    """
    # Load from download cache.
    if cache_dir is None:
        cache_dir = Path.home() / Path('.psiz')
    else:
        cache_dir = Path(cache_dir)
    dataset_path = cache_dir / Path(cache_subdir, dataset_name)
    # `exist_ok=True` replaces the previous exists()/mkdir() pair,
    # avoiding its check-then-create race.
    dataset_path.mkdir(parents=True, exist_ok=True)

    obs = _fetch_obs(dataset_name, cache_dir, cache_subdir)
    catalog = _fetch_catalog(dataset_name, cache_dir, cache_subdir)

    if verbose > 0:
        print("Dataset Summary")
        print(' n_stimuli: {0}'.format(catalog.n_stimuli))
        print(' n_trial: {0}'.format(obs.n_trial))

    return (obs, catalog)
def _fetch_catalog(dataset_name, cache_dir, cache_subdir):
    """Fetch catalog for the requested dataset.

    Arguments:
        dataset_name: The name of the dataset to load.
        cache_dir: The cache directory for PsiZ.
        cache_subdir: The subdirectory where downloaded datasets are
            cached.

    Returns:
        catalog: A Catalog object.

    """
    fname = "catalog.hdf5"
    # Map each hosted dataset to the download URL of its catalog file.
    catalog_origins = {
        "birds-12": "https://osf.io/xek89/download",
        "birds-16": "https://osf.io/473vh/download",
        "skin_lesions": "https://osf.io/5grsp/download",
        "rocks_Nosofsky_etal_2016": "https://osf.io/vw28u/download",
        "ilsvrc_val_v0_1": "https://osf.io/bf3e2/download",
        "ilsvrc_val_v0_2": "https://osf.io/bf3e2/download",
    }

    origin = catalog_origins.get(dataset_name)
    if origin is None:
        raise ValueError(
            'The requested dataset `{0}` may not exist since the '
            'corresponding catalog.hdf5 file does not '
            'exist.'.format(dataset_name)
        )

    path = _get_file(
        os.path.join(dataset_name, fname), origin,
        cache_subdir=cache_subdir, extract=True,
        cache_dir=cache_dir
    )
    return psiz.catalog.load_catalog(path)
def _fetch_obs(dataset_name, cache_dir, cache_subdir):
    """Fetch observations for the requested dataset.

    Arguments:
        dataset_name: The name of the dataset to load.
        cache_dir: The cache directory for PsiZ.
        cache_subdir: The subdirectory where downloaded datasets are
            cached.

    Returns:
        obs: An RankObservations object.

    """
    fname = 'obs.hdf5'
    # Map each hosted dataset to the download URL of its observations file.
    obs_origins = {
        "birds-12": "https://osf.io/apd3g/download",
        "birds-16": "https://osf.io/nz4gy/download",
        "skin_lesions": "https://osf.io/nbps4/download",
        "rocks_Nosofsky_etal_2016": "https://osf.io/jauvh/download",
        "ilsvrc_val_v0_1": "https://osf.io/ej6sz/download",
        "ilsvrc_val_v0_2": "https://osf.io/x6dht/download",
    }

    origin = obs_origins.get(dataset_name)
    if origin is None:
        raise ValueError(
            'The requested dataset `{0}` may not exist since the '
            'corresponding obs.hdf5 file does not '
            'exist.'.format(dataset_name)
        )

    path = _get_file(
        os.path.join(dataset_name, fname), origin,
        cache_subdir=cache_subdir, extract=True, cache_dir=cache_dir
    )
    return psiz.trials.load_trials(path)
def _extract_archive(file_path, path='.', archive_format='auto'):
    """Extract an archive if it matches tar, tar.gz, tar.bz, or zip formats.

    Arguments:
        file_path: Path to the archive file.
        path: Path to extract the archive file into.
        archive_format: Archive format to try for extracting the file.
            Options are 'auto', 'tar', 'zip', and None.
            'tar' includes tar, tar.gz, and tar.bz files.
            The default 'auto' is ['tar', 'zip'].
            None or an empty list will return no matches found.

    Returns:
        True if a match was found and an archive extraction was
        completed, False otherwise.

    """
    if archive_format is None:
        return False
    if archive_format == 'auto':
        archive_format = ['tar', 'zip']
    if isinstance(archive_format, str):
        archive_format = [archive_format]

    for archive_type in archive_format:
        if archive_type == 'tar':
            open_fn = tarfile.open
            is_match_fn = tarfile.is_tarfile
        elif archive_type == 'zip':
            open_fn = zipfile.ZipFile
            is_match_fn = zipfile.is_zipfile
        else:
            # Skip unrecognized format names; previously these left
            # `is_match_fn`/`open_fn` unbound and raised a NameError.
            continue

        if is_match_fn(file_path):
            with open_fn(file_path) as archive:
                try:
                    archive.extractall(path)
                except (tarfile.TarError, RuntimeError,
                        KeyboardInterrupt):
                    # Remove the partially-extracted tree before
                    # propagating the error.
                    if os.path.exists(path):
                        if os.path.isfile(path):
                            os.remove(path)
                        else:
                            shutil.rmtree(path)
                    raise
            return True
    return False
def _get_file(
        fname, origin, untar=False, cache_subdir='datasets', extract=False,
        archive_format='auto', cache_dir=None):
    """Download a file from a URL if it is not already in the cache.

    Arguments:
        fname: Name of the file (may include a subdirectory prefix).
        origin: URL to download from when the file is missing.
        untar: If True, treat `fname` as the extracted name of a
            `.tar.gz` archive and extract it after download.
        cache_subdir: Subdirectory of the cache to store the file in.
        extract: If True, extract the downloaded archive in place.
        archive_format: Passed through to `_extract_archive`.
        cache_dir: Root cache directory; defaults to `~/.psiz`.

    Returns:
        The path to the cached (and possibly extracted) file.

    """
    if cache_dir is None:
        cache_dir = os.path.join(os.path.expanduser('~'), '.psiz')

    datadir_base = os.path.expanduser(cache_dir)
    # Fall back to /tmp when the cache root is not writable.
    if not os.access(datadir_base, os.W_OK):
        datadir_base = os.path.join('/tmp', '.psiz')
    datadir = os.path.join(datadir_base, cache_subdir)
    # `exist_ok=True` avoids the previous check-then-create race.
    os.makedirs(datadir, exist_ok=True)

    if untar:
        untar_fpath = os.path.join(datadir, fname)
        fpath = untar_fpath + '.tar.gz'
    else:
        fpath = os.path.join(datadir, fname)

    # Only download when the file is not cached yet.
    download = not os.path.exists(fpath)

    if download:
        print('Downloading data from', origin)

        class ProgressTracker():
            """Download progress bar."""
            # Maintain progbar for the lifetime of download.
            # This design was chosen for Python 2.7 compatibility.
            progbar = None

        def dl_progress(count, block_size, total_size):
            # urlretrieve reporthook: lazily create the bar, then update.
            if ProgressTracker.progbar is None:
                if total_size == -1:
                    total_size = None
                ProgressTracker.progbar = Progbar(total_size)
            else:
                ProgressTracker.progbar.update(count * block_size)

        error_msg = 'URL fetch failure on {}: {} -- {}'
        try:
            try:
                urlretrieve(origin, fpath, dl_progress)
            except HTTPError as e:
                raise Exception(error_msg.format(origin, e.code, e.msg)) from e
            except URLError as e:
                raise Exception(
                    error_msg.format(origin, e.errno, e.reason)
                ) from e
        except (Exception, KeyboardInterrupt):
            # Remove a partial download so the next attempt starts clean.
            if os.path.exists(fpath):
                os.remove(fpath)
            raise

    if untar:
        if not os.path.exists(untar_fpath):
            _extract_archive(fpath, datadir, archive_format='tar')
        return untar_fpath

    if extract:
        _extract_archive(fpath, datadir, archive_format)

    return fpath
class Progbar():
    """Displays a textual progress bar on stdout."""

    def __init__(
            self, target, width=30, verbose=1, interval=0.05,
            stateful_metrics=None):
        """Initialize.

        Arguments:
            target: Total number of steps expected, None if unknown.
            width: Progress bar width on screen.
            verbose: Degree of verbosity (0 silent, 1 live bar,
                2 one summary line at completion).
            stateful_metrics: Iterable of string names of metrics that
                should *not* be averaged over time. Metrics in this
                list will be displayed as-is. All others will be
                averaged by the progbar before display.
            interval: Minimum visual progress update interval (in
                seconds).
        """
        self.target = target
        self.width = width
        self.verbose = verbose
        self.interval = interval
        if stateful_metrics:
            self.stateful_metrics = set(stateful_metrics)
        else:
            self.stateful_metrics = set()

        # Carriage-return style redraws only make sense on a TTY (or a
        # Jupyter kernel); otherwise each update is written on its own line.
        self._dynamic_display = (
            (hasattr(sys.stdout, 'isatty') and sys.stdout.isatty())
            or 'ipykernel' in sys.modules
        )
        self._total_width = 0
        self._seen_so_far = 0
        # Maps metric name -> [weighted value sum, weight] so a running
        # mean can be displayed for non-stateful metrics.
        self._values = collections.OrderedDict()
        self._start = time.time()
        self._last_update = 0

    def update(self, current, values=None):
        """Update the progress bar.

        Arguments
            current: Index of current step.
            values: List of tuples:
                `(name, value_for_last_step)`.
                If `name` is in `stateful_metrics`,
                `value_for_last_step` will be displayed as-is.
                Else, an average of the metric over time will be displayed.
        """
        values = values or []
        for k, v in values:
            if k not in self.stateful_metrics:
                # Weight each reported value by the number of steps it
                # covers so the displayed number is a true running mean.
                if k not in self._values:
                    self._values[k] = [v * (current - self._seen_so_far),
                                       current - self._seen_so_far]
                else:
                    self._values[k][0] += v * (current - self._seen_so_far)
                    self._values[k][1] += (current - self._seen_so_far)
            else:
                # Stateful metrics output a numeric value. This representation
                # means "take an average from a single value" but keeps the
                # numeric formatting.
                self._values[k] = [v, 1]
        self._seen_so_far = current

        now = time.time()
        info = ' - %.0fs' % (now - self._start)
        if self.verbose == 1:
            # Throttle redraws, but always render the final step.
            if (
                    (now - self._last_update < self.interval)
                    and (self.target is not None)
                    and (current < self.target)
            ):
                return

            prev_total_width = self._total_width
            if self._dynamic_display:
                sys.stdout.write('\b' * prev_total_width)
                sys.stdout.write('\r')
            else:
                sys.stdout.write('\n')

            if self.target is not None:
                numdigits = int(np.floor(np.log10(self.target))) + 1
                barstr = '%%%dd/%d [' % (numdigits, self.target)
                displayed_bar = barstr % current
                prog = float(current) / self.target
                prog_width = int(self.width * prog)
                if prog_width > 0:
                    displayed_bar += ('=' * (prog_width - 1))
                    # '>' marks the moving head until completion.
                    if current < self.target:
                        displayed_bar += '>'
                    else:
                        displayed_bar += '='
                displayed_bar += ('.' * (self.width - prog_width))
                displayed_bar += ']'
            else:
                displayed_bar = '%7d/Unknown' % current

            self._total_width = len(displayed_bar)
            sys.stdout.write(displayed_bar)

            if current:
                time_per_unit = (now - self._start) / current
            else:
                time_per_unit = 0
            if self.target is not None and current < self.target:
                eta = time_per_unit * (self.target - current)
                if eta > 3600:
                    eta_format = ('%d:%02d:%02d' %
                                  (eta // 3600, (eta % 3600) // 60, eta % 60))
                elif eta > 60:
                    eta_format = '%d:%02d' % (eta // 60, eta % 60)
                else:
                    eta_format = '%ds' % eta
                info = ' - ETA: %s' % eta_format
            else:
                if time_per_unit >= 1:
                    info += ' %.0fs/step' % time_per_unit
                elif time_per_unit >= 1e-3:
                    info += ' %.0fms/step' % (time_per_unit * 1e3)
                else:
                    info += ' %.0fus/step' % (time_per_unit * 1e6)

            for k in self._values:
                info += ' - %s:' % k
                if isinstance(self._values[k], list):
                    avg = np.mean(
                        self._values[k][0] / max(1, self._values[k][1]))
                    if abs(avg) > 1e-3:
                        info += ' %.4f' % avg
                    else:
                        info += ' %.4e' % avg
                else:
                    info += ' %s' % self._values[k]

            self._total_width += len(info)
            # Pad with spaces so a shorter line fully overwrites the
            # previous (longer) one when redrawing in place.
            if prev_total_width > self._total_width:
                info += (' ' * (prev_total_width - self._total_width))

            if self.target is not None and current >= self.target:
                info += '\n'

            sys.stdout.write(info)
            sys.stdout.flush()

        elif self.verbose == 2:
            # Summary mode: only emit a line once the run completes.
            if self.target is None or current >= self.target:
                for k in self._values:
                    info += ' - %s:' % k
                    avg = np.mean(
                        self._values[k][0] / max(1, self._values[k][1]))
                    # Fix: use abs() like the verbose==1 branch so small
                    # *negative* averages also get scientific notation.
                    if abs(avg) > 1e-3:
                        info += ' %.4f' % avg
                    else:
                        info += ' %.4e' % avg
                info += '\n'

                sys.stdout.write(info)
                sys.stdout.flush()

        self._last_update = now

    def add(self, n, values=None):
        """Advance the bar by ``n`` steps (relative update)."""
        self.update(self._seen_so_far + n, values)
| 34.20985 | 79 | 0.558838 |
d0314e5b4d0bf61d680e00c1a56abbadb4d3ac59 | 1,676 | py | Python | apps/utils/parse_user_agents/wrappers.py | yandenghong/KortURL | 07ae04bda5fde5f6afeee228790e237e9251b458 | [
"MIT"
] | 118 | 2019-04-04T11:16:47.000Z | 2021-11-21T15:33:22.000Z | apps/utils/parse_user_agents/wrappers.py | yandenghong/KortURL | 07ae04bda5fde5f6afeee228790e237e9251b458 | [
"MIT"
] | 5 | 2019-08-29T02:44:25.000Z | 2020-08-12T06:06:52.000Z | apps/utils/parse_user_agents/wrappers.py | yandenghong/KortURL | 07ae04bda5fde5f6afeee228790e237e9251b458 | [
"MIT"
] | 19 | 2019-04-06T06:28:57.000Z | 2020-09-11T07:49:56.000Z | from hashlib import md5
from django.conf import settings
from django.core.cache import caches
from django.core.cache import DEFAULT_CACHE_ALIAS
from apps.utils.parse_user_agents.user_agents import parse
def get_cache(backend, **kwargs):
    """Return the Django cache instance registered under ``backend``.

    Extra keyword arguments are accepted (and ignored) so the signature
    stays compatible with Django's historical ``get_cache`` helper.
    """
    selected = caches[backend]
    return selected
# ``str`` alias retained from the Python 2/3 compatibility era.
text_type = str

# Django cache alias used to memoize parsed user agents.  Defaults to the
# project's default cache; an explicitly falsy setting disables caching.
USER_AGENTS_CACHE = getattr(settings, 'USER_AGENTS_CACHE', DEFAULT_CACHE_ALIAS)

if USER_AGENTS_CACHE:
    cache = get_cache(USER_AGENTS_CACHE)
else:
    # Caching disabled: every request re-parses its user-agent string.
    cache = None
def get_cache_key(ua_string):
    """Build a cache key for a user-agent string.

    Some user agent strings are longer than 250 characters (the memcached
    key-length limit), so the MD5 digest of the string is used instead of
    the raw value.  Accepts ``str`` or ``bytes``; returns a ``str`` key.
    """
    # ``str`` replaces the old Python-2 ``text_type`` indirection.
    if isinstance(ua_string, str):
        ua_string = ua_string.encode('utf-8')
    return 'django_user_agents.' + md5(ua_string).hexdigest()
def get_user_agent(request):
    """Return a parsed user agent for ``request``.

    Parsed results are memoized in the configured Django cache because
    building a UserAgent from scratch (parsing ua-parser's
    regexes.yaml/json) is slow.  Objects without ``META`` yield ``''``.
    """
    if not hasattr(request, 'META'):
        return ''
    ua_string = request.META.get('HTTP_USER_AGENT', '')
    if not cache:
        # Caching disabled -- parse on every call.
        return parse(ua_string)
    key = get_cache_key(ua_string)
    user_agent = cache.get(key)
    if user_agent is None:
        user_agent = parse(ua_string)
        cache.set(key, user_agent)
    return user_agent
def get_and_set_user_agent(request):
    """Return ``request.user_agent``, computing and caching it if absent.

    A pre-existing ``user_agent`` attribute is returned as-is; otherwise
    the agent is resolved once via :func:`get_user_agent` and attached to
    the request for reuse.  Falsy requests get an empty parse result.
    """
    try:
        return request.user_agent
    except AttributeError:
        pass
    if not request:
        return parse('')
    request.user_agent = get_user_agent(request)
    return request.user_agent
aa34ce6731dfb3d2200a22d7ad7e2392dcc50145 | 1,632 | py | Python | src/sentry/api/serializers/models/release.py | kl07/sentry | 427b456f18c4040127f7a05805089f372e920df0 | [
"BSD-3-Clause"
] | 20 | 2016-10-01T04:29:24.000Z | 2020-10-09T07:23:34.000Z | src/sentry/api/serializers/models/release.py | tobetterman/sentry | fe85d3aee19dcdbfdd27921c4fb04529fc995a79 | [
"BSD-3-Clause"
] | null | null | null | src/sentry/api/serializers/models/release.py | tobetterman/sentry | fe85d3aee19dcdbfdd27921c4fb04529fc995a79 | [
"BSD-3-Clause"
] | 7 | 2016-10-27T05:12:45.000Z | 2021-05-01T14:29:53.000Z | from __future__ import absolute_import
from sentry.api.serializers import Serializer, register, serialize
from sentry.models import Release, TagValue
@register(Release)
class ReleaseSerializer(Serializer):
    """Serializer for ``Release`` models.

    Augments each release with its ``sentry:release`` tag value (used for
    first/last event timestamps) and the serialized owner.
    """

    def get_attrs(self, item_list, user):
        # Guard: an empty item list would otherwise crash on the
        # ``item_list[0].project`` access below.
        if not item_list:
            return {}

        tags = {
            tk.value: tk
            for tk in TagValue.objects.filter(
                # All releases serialized together share a project, so
                # the first item's project is representative.
                project=item_list[0].project,
                key='sentry:release',
                value__in=[o.version for o in item_list],
            )
        }

        owners = {
            k: v
            for k, v in zip(
                item_list, serialize([i.owner for i in item_list], user)
            )
        }

        result = {}
        for item in item_list:
            result[item] = {
                'tag': tags.get(item.version),
                'owner': owners[item],
            }
        return result

    def serialize(self, obj, attrs, user):
        d = {
            'version': obj.version,
            'shortVersion': obj.short_version,
            'ref': obj.ref,
            'url': obj.url,
            'dateStarted': obj.date_started,
            'dateReleased': obj.date_released,
            'dateCreated': obj.date_added,
            'data': obj.data,
            'newGroups': obj.new_groups,
            'owner': attrs['owner'],
        }
        # Event timestamps come from the release tag, when one exists.
        if attrs['tag']:
            d.update({
                'lastEvent': attrs['tag'].last_seen,
                'firstEvent': attrs['tag'].first_seen,
            })
        else:
            d.update({
                'lastEvent': None,
                'firstEvent': None,
            })
        return d
| 28.631579 | 72 | 0.487132 |
a32d7e7141837aee9507f7ddc606aea884e846d6 | 1,160 | py | Python | pyinfra/operations/launchd.py | themanifold/pyinfra | 5b2eef0b22fa6b1750d595d7eb3e258ce350a693 | [
"MIT"
] | 1 | 2021-09-25T09:21:37.000Z | 2021-09-25T09:21:37.000Z | pyinfra/operations/launchd.py | themanifold/pyinfra | 5b2eef0b22fa6b1750d595d7eb3e258ce350a693 | [
"MIT"
] | null | null | null | pyinfra/operations/launchd.py | themanifold/pyinfra | 5b2eef0b22fa6b1750d595d7eb3e258ce350a693 | [
"MIT"
] | null | null | null | '''
Manage launchd services.
'''
from __future__ import unicode_literals
from pyinfra.api import operation
from pyinfra.facts.launchd import LaunchdStatus
from .util.service import handle_service_control
@operation
def service(
    service,
    running=True, restarted=False, command=None,
    state=None, host=None,
):
    '''
    Manage the state of launchd managed services.

    + service: name of the service to manage
    + running: whether the service should be running
    + restarted: whether the service should be restarted
    + command: custom command to pass like: ``launchctl <command> <service>``

    Note: ``state`` and ``host`` are injected by the pyinfra operation
    framework, not supplied by the user.
    '''

    # Capture the pre-change state so a restart only happens when the
    # service was actually running before this operation.
    was_running = host.get_fact(LaunchdStatus).get(service, None)

    yield handle_service_control(
        host,
        service, host.get_fact(LaunchdStatus),
        'launchctl {1} {0}',
        # No support for restart/reload/command
        running, None, None, None,
    )

    # No restart command, so just stop/start
    if restarted and was_running:
        yield 'launchctl stop {0}'.format(service)
        yield 'launchctl start {0}'.format(service)
4cb4ab2a0b61a5a9a8e90020f07a05c102f5e9ae | 63,079 | py | Python | bitshares/bitshares.py | thehapax/python-bitshares | 1fff025a1ee2fdf222004d58210f1b656fb7f2c8 | [
"MIT"
] | null | null | null | bitshares/bitshares.py | thehapax/python-bitshares | 1fff025a1ee2fdf222004d58210f1b656fb7f2c8 | [
"MIT"
] | null | null | null | bitshares/bitshares.py | thehapax/python-bitshares | 1fff025a1ee2fdf222004d58210f1b656fb7f2c8 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import logging
from datetime import datetime, timedelta
from graphenecommon.chain import AbstractGrapheneChain
from bitsharesapi.bitsharesnoderpc import BitSharesNodeRPC
from bitsharesbase import operations
from bitsharesbase.account import PublicKey
from bitsharesbase.asset_permissions import asset_permissions, toint
from .account import Account
from .amount import Amount
from .asset import Asset
from .committee import Committee
from .exceptions import AccountExistsException, KeyAlreadyInStoreException
from .instance import set_shared_blockchain_instance, shared_blockchain_instance
from .price import Price
from .storage import get_default_config_store
from .transactionbuilder import ProposalBuilder, TransactionBuilder
from .vesting import Vesting
from .wallet import Wallet
from .witness import Witness
from .worker import Worker
from .htlc import Htlc
# from .utils import formatTime
# Module-level logger; handler/level configuration is left to the
# consuming application.
log = logging.getLogger(__name__)
class BitShares(AbstractGrapheneChain):
""" Connect to the BitShares network.
:param str node: Node to connect to *(optional)*
:param str rpcuser: RPC user *(optional)*
:param str rpcpassword: RPC password *(optional)*
:param bool nobroadcast: Do **not** broadcast a transaction!
*(optional)*
:param bool debug: Enable Debugging *(optional)*
:param array,dict,string keys: Predefine the wif keys to shortcut the
wallet database *(optional)*
:param bool offline: Boolean to prevent connecting to network (defaults
to ``False``) *(optional)*
:param str proposer: Propose a transaction using this proposer
*(optional)*
:param int proposal_expiration: Expiration time (in seconds) for the
proposal *(optional)*
:param int proposal_review: Review period (in seconds) for the proposal
*(optional)*
:param int expiration: Delay in seconds until transactions are supposed
to expire *(optional)*
:param str blocking: Wait for broadcasted transactions to be included
in a block and return full transaction (can be "head" or
"irrversible")
:param bool bundle: Do not broadcast transactions right away, but allow
to bundle operations *(optional)*
Three wallet operation modes are possible:
* **Wallet Database**: Here, the bitshareslibs load the keys from the
locally stored wallet SQLite database (see ``storage.py``).
To use this mode, simply call ``BitShares()`` without the
``keys`` parameter
* **Providing Keys**: Here, you can provide the keys for
your accounts manually. All you need to do is add the wif
keys for the accounts you want to use as a simple array
using the ``keys`` parameter to ``BitShares()``.
* **Force keys**: This more is for advanced users and
requires that you know what you are doing. Here, the
``keys`` parameter is a dictionary that overwrite the
``active``, ``owner``, or ``memo`` keys for
any account. This mode is only used for *foreign*
signatures!
If no node is provided, it will connect to the node of
http://uptick.rocks. It is **highly** recommended that you
pick your own node instead. Default settings can be changed with:
.. code-block:: python
uptick set node <host>
where ``<host>`` starts with ``ws://`` or ``wss://``.
The purpose of this class it to simplify interaction with
BitShares.
The idea is to have a class that allows to do this:
.. code-block:: python
from bitshares import BitShares
bitshares = BitShares()
print(bitshares.info())
All that is requires is for the user to have added a key with
``uptick``
.. code-block:: bash
uptick addkey
and setting a default author:
.. code-block:: bash
uptick set default_account xeroc
This class also deals with edits, votes and reading content.
"""
def define_classes(self):
from .blockchainobject import BlockchainObject
self.wallet_class = Wallet
self.account_class = Account
self.rpc_class = BitSharesNodeRPC
self.default_key_store_app_name = "bitshares"
self.proposalbuilder_class = ProposalBuilder
self.transactionbuilder_class = TransactionBuilder
self.blockchainobject_class = BlockchainObject
# -------------------------------------------------------------------------
# Simple Transfer
# -------------------------------------------------------------------------
def transfer(self, to, amount, asset, memo="", account=None, **kwargs):
""" Transfer an asset to another account.
:param str to: Recipient
:param float amount: Amount to transfer
:param str asset: Asset to transfer
:param str memo: (optional) Memo, may begin with `#` for encrypted
messaging
:param str account: (optional) the source account for the transfer
if not ``default_account``
"""
from .memo import Memo
if not account:
if "default_account" in self.config:
account = self.config["default_account"]
if not account:
raise ValueError("You need to provide an account")
account = Account(account, blockchain_instance=self)
amount = Amount(amount, asset, blockchain_instance=self)
to = Account(to, blockchain_instance=self)
memoObj = Memo(from_account=account, to_account=to, blockchain_instance=self)
op = operations.Transfer(
**{
"fee": {"amount": 0, "asset_id": "1.3.0"},
"from": account["id"],
"to": to["id"],
"amount": {"amount": int(amount), "asset_id": amount.asset["id"]},
"memo": memoObj.encrypt(memo),
"prefix": self.prefix,
}
)
return self.finalizeOp(op, account, "active", **kwargs)
# -------------------------------------------------------------------------
# Account related calls
# -------------------------------------------------------------------------
def _store_keys(self, *args):
for k in args:
try:
self.wallet.addPrivateKey(str(k))
except KeyAlreadyInStoreException:
pass
def create_account(
self,
account_name,
registrar=None,
referrer="1.2.35641",
referrer_percent=50,
owner_key=None,
active_key=None,
memo_key=None,
owner_account=None,
active_account=None,
password=None,
additional_owner_keys=[],
additional_active_keys=[],
additional_owner_accounts=[],
additional_active_accounts=[],
proxy_account="proxy-to-self",
storekeys=True,
**kwargs
):
""" Create new account on BitShares
The brainkey/password can be used to recover all generated keys
(see `bitsharesbase.account` for more details.
By default, this call will use ``default_account`` to
register a new name ``account_name`` with all keys being
derived from a new brain key that will be returned. The
corresponding keys will automatically be installed in the
wallet.
.. warning:: Don't call this method unless you know what
you are doing! Be sure to understand what this
method does and where to find the private keys
for your account.
.. note:: Please note that this imports private keys
(if password is present) into the wallet by
default. However, it **does not import the owner
key** for security reasons. Do NOT expect to be
able to recover it from the wallet if you lose
your password!
:param str account_name: (**required**) new account name
:param str registrar: which account should pay the registration fee
(defaults to ``default_account``)
:param str owner_key: Main owner key
:param str active_key: Main active key
:param str memo_key: Main memo_key
:param str password: Alternatively to providing keys, one
can provide a password from which the
keys will be derived
:param array additional_owner_keys: Additional owner public keys
:param array additional_active_keys: Additional active public keys
:param array additional_owner_accounts: Additional owner account
names
:param array additional_active_accounts: Additional acctive account
names
:param bool storekeys: Store new keys in the wallet (default:
``True``)
:raises AccountExistsException: if the account already exists on
the blockchain
"""
if not registrar and self.config["default_account"]:
registrar = self.config["default_account"]
if not registrar:
raise ValueError(
"Not registrar account given. Define it with "
+ "registrar=x, or set the default_account using uptick"
)
if password and (owner_key or active_key or memo_key):
raise ValueError("You cannot use 'password' AND provide keys!")
try:
Account(account_name, blockchain_instance=self)
raise AccountExistsException
except Exception:
pass
referrer = Account(referrer, blockchain_instance=self)
registrar = Account(registrar, blockchain_instance=self)
" Generate new keys from password"
from bitsharesbase.account import PasswordKey, PublicKey
owner_key_authority = []
active_key_authority = []
owner_accounts_authority = []
active_accounts_authority = []
if password:
active_key = PasswordKey(account_name, password, role="active")
owner_key = PasswordKey(account_name, password, role="owner")
memo_key = PasswordKey(account_name, password, role="memo")
active_pubkey = active_key.get_public_key()
owner_pubkey = owner_key.get_public_key()
memo_pubkey = memo_key.get_public_key()
active_privkey = active_key.get_private_key()
# owner_privkey = owner_key.get_private_key()
memo_privkey = memo_key.get_private_key()
# store private keys
if storekeys:
self._store_keys(active_privkey, memo_privkey)
owner_key_authority = [[format(owner_pubkey, self.prefix), 1]]
active_key_authority = [[format(active_pubkey, self.prefix), 1]]
memo = format(memo_pubkey, self.prefix)
elif owner_key and active_key and memo_key:
active_pubkey = PublicKey(active_key, prefix=self.prefix)
owner_pubkey = PublicKey(owner_key, prefix=self.prefix)
memo_pubkey = PublicKey(memo_key, prefix=self.prefix)
owner_key_authority = [[format(owner_pubkey, self.prefix), 1]]
active_key_authority = [[format(active_pubkey, self.prefix), 1]]
memo = format(memo_pubkey, self.prefix)
elif owner_account and active_account and memo_key:
memo_pubkey = PublicKey(memo_key, prefix=self.prefix)
memo = format(memo_pubkey, self.prefix)
owner_account = Account(owner_account, blockchain_instance=self)
active_account = Account(active_account, blockchain_instance=self)
owner_accounts_authority = [[owner_account["id"], 1]]
active_accounts_authority = [[active_account["id"], 1]]
else:
raise ValueError(
"Call incomplete! Provide either a password, owner/active public keys or owner/active accounts + memo key!"
)
# additional authorities
for k in additional_owner_keys:
PublicKey(k, prefix=self.prefix)
owner_key_authority.append([k, 1])
for k in additional_active_keys:
PublicKey(k, prefix=self.prefix)
active_key_authority.append([k, 1])
for k in additional_owner_accounts:
addaccount = Account(k, blockchain_instance=self)
owner_accounts_authority.append([addaccount["id"], 1])
for k in additional_active_accounts:
addaccount = Account(k, blockchain_instance=self)
active_accounts_authority.append([addaccount["id"], 1])
# voting account
voting_account = Account(
proxy_account or "proxy-to-self", blockchain_instance=self
)
op = {
"fee": {"amount": 0, "asset_id": "1.3.0"},
"registrar": registrar["id"],
"referrer": referrer["id"],
"referrer_percent": int(referrer_percent * 100),
"name": account_name,
"owner": {
"account_auths": owner_accounts_authority,
"key_auths": owner_key_authority,
"address_auths": [],
"weight_threshold": 1,
},
"active": {
"account_auths": active_accounts_authority,
"key_auths": active_key_authority,
"address_auths": [],
"weight_threshold": 1,
},
"options": {
"memo_key": memo,
"voting_account": voting_account["id"],
"num_witness": 0,
"num_committee": 0,
"votes": [],
"extensions": [],
},
"extensions": {},
"prefix": self.prefix,
}
op = operations.Account_create(**op)
return self.finalizeOp(op, registrar, "active", **kwargs)
def upgrade_account(self, account=None, **kwargs):
""" Upgrade an account to Lifetime membership
:param str account: (optional) the account to allow access
to (defaults to ``default_account``)
"""
if not account:
if "default_account" in self.config:
account = self.config["default_account"]
if not account:
raise ValueError("You need to provide an account")
account = Account(account, blockchain_instance=self)
op = operations.Account_upgrade(
**{
"fee": {"amount": 0, "asset_id": "1.3.0"},
"account_to_upgrade": account["id"],
"upgrade_to_lifetime_member": True,
"prefix": self.prefix,
}
)
return self.finalizeOp(op, account["name"], "active", **kwargs)
def _test_weights_treshold(self, authority):
""" This method raises an error if the threshold of an authority cannot
be reached by the weights.
:param dict authority: An authority of an account
:raises ValueError: if the threshold is set too high
"""
weights = 0
for a in authority["account_auths"]:
weights += int(a[1])
for a in authority["key_auths"]:
weights += int(a[1])
if authority["weight_threshold"] > weights:
raise ValueError("Threshold too restrictive!")
if authority["weight_threshold"] == 0:
raise ValueError("Cannot have threshold of 0")
    def allow(
        self,
        foreign,
        weight=None,
        permission="active",
        account=None,
        threshold=None,
        **kwargs
    ):
        """ Give additional access to an account by some other public
            key or account.

            :param str foreign: The foreign account that will obtain access
            :param int weight: (optional) The weight to use. If not
                defined, the threshold will be used. If the weight is
                smaller than the threshold, additional signatures will
                be required. (defaults to threshold)
            :param str permission: (optional) The actual permission to
                modify (defaults to ``active``)
            :param str account: (optional) the account to allow access
                to (defaults to ``default_account``)
            :param int threshold: The threshold that needs to be reached
                by signatures to be able to interact
        """
        from copy import deepcopy

        if not account:
            if "default_account" in self.config:
                account = self.config["default_account"]
        if not account:
            raise ValueError("You need to provide an account")

        if permission not in ["owner", "active"]:
            raise ValueError("Permission needs to be either 'owner', or 'active")
        account = Account(account, blockchain_instance=self)

        # Default the new entry's weight to the full threshold so it can
        # act alone.
        if not weight:
            weight = account[permission]["weight_threshold"]

        # Work on a copy so the cached account object is not mutated if
        # building the operation fails half-way.
        authority = deepcopy(account[permission])
        try:
            # First try to interpret ``foreign`` as a public key ...
            pubkey = PublicKey(foreign, prefix=self.prefix)
            authority["key_auths"].append([str(pubkey), weight])
        except Exception:
            try:
                # ... otherwise fall back to an account name/id.
                foreign_account = Account(foreign, blockchain_instance=self)
                authority["account_auths"].append([foreign_account["id"], weight])
            except Exception:
                raise ValueError("Unknown foreign account or invalid public key")
        if threshold:
            authority["weight_threshold"] = threshold
            # Ensure the new threshold is actually reachable.
            self._test_weights_treshold(authority)

        op = operations.Account_update(
            **{
                "fee": {"amount": 0, "asset_id": "1.3.0"},
                "account": account["id"],
                permission: authority,
                "extensions": {},
                "prefix": self.prefix,
            }
        )
        # Changing the owner authority must be signed with the owner key.
        if permission == "owner":
            return self.finalizeOp(op, account["name"], "owner", **kwargs)
        else:
            return self.finalizeOp(op, account["name"], "active", **kwargs)
def disallow(
self, foreign, permission="active", account=None, threshold=None, **kwargs
):
""" Remove additional access to an account by some other public
key or account.
:param str foreign: The foreign account that will obtain access
:param str permission: (optional) The actual permission to
modify (defaults to ``active``)
:param str account: (optional) the account to allow access
to (defaults to ``default_account``)
:param int threshold: The threshold that needs to be reached
by signatures to be able to interact
"""
if not account:
if "default_account" in self.config:
account = self.config["default_account"]
if not account:
raise ValueError("You need to provide an account")
if permission not in ["owner", "active"]:
raise ValueError("Permission needs to be either 'owner', or 'active")
account = Account(account, blockchain_instance=self)
authority = account[permission]
try:
pubkey = PublicKey(foreign, prefix=self.prefix)
affected_items = list(
filter(lambda x: x[0] == str(pubkey), authority["key_auths"])
)
authority["key_auths"] = list(
filter(lambda x: x[0] != str(pubkey), authority["key_auths"])
)
except Exception:
try:
foreign_account = Account(foreign, blockchain_instance=self)
affected_items = list(
filter(
lambda x: x[0] == foreign_account["id"],
authority["account_auths"],
)
)
authority["account_auths"] = list(
filter(
lambda x: x[0] != foreign_account["id"],
authority["account_auths"],
)
)
except Exception:
raise ValueError("Unknown foreign account or unvalid public key")
if not affected_items:
raise ValueError("Changes nothing!")
removed_weight = affected_items[0][1]
# Define threshold
if threshold:
authority["weight_threshold"] = threshold
# Correct threshold (at most by the amount removed from the
# authority)
try:
self._test_weights_treshold(authority)
except Exception:
log.critical(
"The account's threshold will be reduced by %d" % (removed_weight)
)
authority["weight_threshold"] -= removed_weight
self._test_weights_treshold(authority)
op = operations.Account_update(
**{
"fee": {"amount": 0, "asset_id": "1.3.0"},
"account": account["id"],
permission: authority,
"extensions": {},
}
)
if permission == "owner":
return self.finalizeOp(op, account["name"], "owner", **kwargs)
else:
return self.finalizeOp(op, account["name"], "active", **kwargs)
def update_memo_key(self, key, account=None, **kwargs):
""" Update an account's memo public key
This method does **not** add any private keys to your
wallet but merely changes the memo public key.
:param str key: New memo public key
:param str account: (optional) the account to allow access
to (defaults to ``default_account``)
"""
if not account:
if "default_account" in self.config:
account = self.config["default_account"]
if not account:
raise ValueError("You need to provide an account")
PublicKey(key, prefix=self.prefix)
account = Account(account, blockchain_instance=self)
account["options"]["memo_key"] = key
op = operations.Account_update(
**{
"fee": {"amount": 0, "asset_id": "1.3.0"},
"account": account["id"],
"new_options": account["options"],
"extensions": {},
"prefix": self.prefix,
}
)
return self.finalizeOp(op, account["name"], "active", **kwargs)
# -------------------------------------------------------------------------
# Approval and Disapproval of witnesses, workers, committee, and proposals
# -------------------------------------------------------------------------
def approvewitness(self, witnesses, account=None, **kwargs):
""" Approve a witness
:param list witnesses: list of Witness name or id
:param str account: (optional) the account to allow access
to (defaults to ``default_account``)
"""
if not account:
if "default_account" in self.config:
account = self.config["default_account"]
if not account:
raise ValueError("You need to provide an account")
account = Account(account, blockchain_instance=self)
options = account["options"]
if not isinstance(witnesses, (list, set, tuple)):
witnesses = {witnesses}
for witness in witnesses:
witness = Witness(witness, blockchain_instance=self)
options["votes"].append(witness["vote_id"])
options["votes"] = list(set(options["votes"]))
options["num_witness"] = len(
list(filter(lambda x: float(x.split(":")[0]) == 1, options["votes"]))
)
options["voting_account"] = "1.2.5" # Account("proxy-to-self")["id"]
op = operations.Account_update(
**{
"fee": {"amount": 0, "asset_id": "1.3.0"},
"account": account["id"],
"new_options": options,
"extensions": {},
"prefix": self.prefix,
}
)
return self.finalizeOp(op, account["name"], "active", **kwargs)
def disapprovewitness(self, witnesses, account=None, **kwargs):
""" Disapprove a witness
:param list witnesses: list of Witness name or id
:param str account: (optional) the account to allow access
to (defaults to ``default_account``)
"""
if not account:
if "default_account" in self.config:
account = self.config["default_account"]
if not account:
raise ValueError("You need to provide an account")
account = Account(account, blockchain_instance=self)
options = account["options"]
if not isinstance(witnesses, (list, set, tuple)):
witnesses = {witnesses}
for witness in witnesses:
witness = Witness(witness, blockchain_instance=self)
if witness["vote_id"] in options["votes"]:
options["votes"].remove(witness["vote_id"])
options["votes"] = list(set(options["votes"]))
options["num_witness"] = len(
list(filter(lambda x: float(x.split(":")[0]) == 1, options["votes"]))
)
options["voting_account"] = "1.2.5" # Account("proxy-to-self")["id"]
op = operations.Account_update(
**{
"fee": {"amount": 0, "asset_id": "1.3.0"},
"account": account["id"],
"new_options": options,
"extensions": {},
"prefix": self.prefix,
}
)
return self.finalizeOp(op, account["name"], "active", **kwargs)
def approvecommittee(self, committees, account=None, **kwargs):
""" Approve a committee
:param list committees: list of committee member name or id
:param str account: (optional) the account to allow access
to (defaults to ``default_account``)
"""
if not account:
if "default_account" in self.config:
account = self.config["default_account"]
if not account:
raise ValueError("You need to provide an account")
account = Account(account, blockchain_instance=self)
options = account["options"]
if not isinstance(committees, (list, set, tuple)):
committees = {committees}
for committee in committees:
committee = Committee(committee, blockchain_instance=self)
options["votes"].append(committee["vote_id"])
options["votes"] = list(set(options["votes"]))
options["num_committee"] = len(
list(filter(lambda x: float(x.split(":")[0]) == 0, options["votes"]))
)
options["voting_account"] = "1.2.5" # Account("proxy-to-self")["id"]
op = operations.Account_update(
**{
"fee": {"amount": 0, "asset_id": "1.3.0"},
"account": account["id"],
"new_options": options,
"extensions": {},
"prefix": self.prefix,
}
)
return self.finalizeOp(op, account["name"], "active", **kwargs)
def disapprovecommittee(self, committees, account=None, **kwargs):
""" Disapprove a committee
:param list committees: list of committee name or id
:param str account: (optional) the account to allow access
to (defaults to ``default_account``)
"""
if not account:
if "default_account" in self.config:
account = self.config["default_account"]
if not account:
raise ValueError("You need to provide an account")
account = Account(account, blockchain_instance=self)
options = account["options"]
if not isinstance(committees, (list, set, tuple)):
committees = {committees}
for committee in committees:
committee = Committee(committee, blockchain_instance=self)
if committee["vote_id"] in options["votes"]:
options["votes"].remove(committee["vote_id"])
options["votes"] = list(set(options["votes"]))
options["num_committee"] = len(
list(filter(lambda x: float(x.split(":")[0]) == 0, options["votes"]))
)
options["voting_account"] = "1.2.5" # Account("proxy-to-self")["id"]
op = operations.Account_update(
**{
"fee": {"amount": 0, "asset_id": "1.3.0"},
"account": account["id"],
"new_options": options,
"extensions": {},
"prefix": self.prefix,
}
)
return self.finalizeOp(op, account["name"], "active", **kwargs)
def approveproposal(self, proposal_ids, account=None, approver=None, **kwargs):
    """ Approve Proposal

        :param list proposal_ids: Ids of the proposals
        :param str approver: The account or public key to use for
            approval (defaults to ``account``)
        :param str account: (optional) the fee-paying account
            (defaults to ``default_account``)
    """
    from .proposal import Proposal

    if not account:
        if "default_account" in self.config:
            account = self.config["default_account"]
    if not account:
        raise ValueError("You need to provide an account")
    account = Account(account, blockchain_instance=self)
    # A raw public key (recognized by the chain prefix, e.g. "BTS...")
    # may be given as approver instead of an account name/id.
    is_key = approver and approver[:3] == self.prefix
    if not approver and not is_key:
        # No approver given: the fee-paying account approves itself.
        approver = account
    elif approver and not is_key:
        approver = Account(approver, blockchain_instance=self)
    else:
        approver = PublicKey(approver)
    # Accept a single proposal id as well as any iterable of ids.
    if not isinstance(proposal_ids, (list, set, tuple)):
        proposal_ids = {proposal_ids}
    op = []
    for proposal_id in proposal_ids:
        proposal = Proposal(proposal_id, blockchain_instance=self)
        update_dict = {
            "fee": {"amount": 0, "asset_id": "1.3.0"},
            "fee_paying_account": account["id"],
            "proposal": proposal["id"],
            "prefix": self.prefix,
        }
        # Key approvals and account approvals go into different fields
        # of the proposal_update operation.
        if is_key:
            update_dict.update({"key_approvals_to_add": [str(approver)]})
        else:
            update_dict.update({"active_approvals_to_add": [approver["id"]]})
        op.append(operations.Proposal_update(**update_dict))
    if is_key:
        # The approving key must sign too; register it as an extra
        # signer while ``account`` still pays the fee.
        self.txbuffer.appendSigner(approver, "active")
        return self.finalizeOp(op, account["name"], "active", **kwargs)
    return self.finalizeOp(op, approver, "active", **kwargs)
def disapproveproposal(self, proposal_ids, account=None, approver=None, **kwargs):
    """ Disapprove Proposal

        :param list proposal_ids: Ids of the proposals
        :param str approver: The account whose approval is removed
            (defaults to ``account``)
        :param str account: (optional) the account to allow access
            to (defaults to ``default_account``)
    """
    from .proposal import Proposal

    if not account and "default_account" in self.config:
        account = self.config["default_account"]
    if not account:
        raise ValueError("You need to provide an account")
    account = Account(account, blockchain_instance=self)
    approver = (
        account if not approver else Account(approver, blockchain_instance=self)
    )
    # A single proposal id may be given instead of an iterable of ids.
    if not isinstance(proposal_ids, (list, set, tuple)):
        proposal_ids = {proposal_ids}
    op = []
    for pid in proposal_ids:
        proposal = Proposal(pid, blockchain_instance=self)
        update = {
            "fee": {"amount": 0, "asset_id": "1.3.0"},
            "fee_paying_account": account["id"],
            "proposal": proposal["id"],
            "active_approvals_to_remove": [approver["id"]],
            "prefix": self.prefix,
        }
        op.append(operations.Proposal_update(**update))
    return self.finalizeOp(op, account["name"], "active", **kwargs)
def approveworker(self, workers, account=None, **kwargs):
    """ Approve a worker

        :param list workers: list of worker member name or id
        :param str account: (optional) the account to allow access
            to (defaults to ``default_account``)
    """
    if not account and "default_account" in self.config:
        account = self.config["default_account"]
    if not account:
        raise ValueError("You need to provide an account")
    account = Account(account, blockchain_instance=self)
    options = account["options"]
    # A single worker may be given instead of an iterable of them.
    if not isinstance(workers, (list, set, tuple)):
        workers = {workers}
    approvals = {
        Worker(w, blockchain_instance=self)["vote_for"] for w in workers
    }
    options["votes"] = list(set(options["votes"]) | approvals)
    options["voting_account"] = "1.2.5"  # Account("proxy-to-self")["id"]
    update = {
        "fee": {"amount": 0, "asset_id": "1.3.0"},
        "account": account["id"],
        "new_options": options,
        "extensions": {},
        "prefix": self.prefix,
    }
    return self.finalizeOp(
        operations.Account_update(**update), account["name"], "active", **kwargs
    )
def disapproveworker(self, workers, account=None, **kwargs):
    """ Disapprove a worker

        :param list workers: list of worker name or id
        :param str account: (optional) the account to allow access
            to (defaults to ``default_account``)
    """
    if not account and "default_account" in self.config:
        account = self.config["default_account"]
    if not account:
        raise ValueError("You need to provide an account")
    account = Account(account, blockchain_instance=self)
    options = account["options"]
    # A single worker may be given instead of an iterable of them.
    if not isinstance(workers, (list, set, tuple)):
        workers = {workers}
    removals = {
        Worker(w, blockchain_instance=self)["vote_for"] for w in workers
    }
    options["votes"] = list(set(options["votes"]) - removals)
    options["voting_account"] = "1.2.5"  # Account("proxy-to-self")["id"]
    update = {
        "fee": {"amount": 0, "asset_id": "1.3.0"},
        "account": account["id"],
        "new_options": options,
        "extensions": {},
        "prefix": self.prefix,
    }
    return self.finalizeOp(
        operations.Account_update(**update), account["name"], "active", **kwargs
    )
def unset_proxy(self, account=None, **kwargs):
    """ Unset the proxy account to start voting yourself

        :param str account: (optional) the account to update
            (defaults to ``default_account``)
    """
    # Proxying to the special "proxy-to-self" account re-enables
    # direct voting for the account.
    return self.set_proxy("proxy-to-self", account=account, **kwargs)
def set_proxy(self, proxy_account, account=None, **kwargs):
    """ Set a specific proxy for account

        :param bitshares.account.Account proxy_account: Account to be
            proxied
        :param str account: (optional) the account to allow access
            to (defaults to ``default_account``)
    """
    if not account and "default_account" in self.config:
        account = self.config["default_account"]
    if not account:
        raise ValueError("You need to provide an account")
    account = Account(account, blockchain_instance=self)
    proxy = Account(proxy_account, blockchain_instance=self)
    # Point the account's voting slot at the proxy account.
    new_options = account["options"]
    new_options["voting_account"] = proxy["id"]
    update = {
        "fee": {"amount": 0, "asset_id": "1.3.0"},
        "account": account["id"],
        "new_options": new_options,
        "extensions": {},
        "prefix": self.prefix,
    }
    return self.finalizeOp(
        operations.Account_update(**update), account["name"], "active", **kwargs
    )
def cancel(self, orderNumbers, account=None, **kwargs):
    """ Cancels an order you have placed in a given market. Requires
        only the "orderNumbers". An order number takes the form
        ``1.7.xxx``.

        :param str orderNumbers: The Order Object ids of the form
            ``1.7.xxxx``
        :param str account: (optional) the account that placed the
            orders (defaults to ``default_account``)
    """
    if not account and "default_account" in self.config:
        account = self.config["default_account"]
    if not account:
        raise ValueError("You need to provide an account")
    # full=False: only the basic account data is needed for the id.
    account = Account(account, full=False, blockchain_instance=self)
    # A single order id may be given instead of an iterable of ids.
    if not isinstance(orderNumbers, (list, set, tuple)):
        orderNumbers = {orderNumbers}
    op = [
        operations.Limit_order_cancel(
            **{
                "fee": {"amount": 0, "asset_id": "1.3.0"},
                "fee_paying_account": account["id"],
                "order": order_id,
                "extensions": [],
                "prefix": self.prefix,
            }
        )
        for order_id in orderNumbers
    ]
    return self.finalizeOp(op, account["name"], "active", **kwargs)
def vesting_balance_withdraw(self, vesting_id, amount=None, account=None, **kwargs):
    """ Withdraw vesting balance

        :param str vesting_id: Id of the vesting object
        :param bitshares.amount.Amount amount: Amount to withdraw
            (the full claimable amount if not provided)
        :param str account: (optional) the account to allow access
            to (defaults to ``default_account``)
    """
    if not account:
        if "default_account" in self.config:
            account = self.config["default_account"]
    if not account:
        raise ValueError("You need to provide an account")
    account = Account(account, blockchain_instance=self)
    if not amount:
        # Default to everything currently claimable from the vesting object.
        obj = Vesting(vesting_id, blockchain_instance=self)
        amount = obj.claimable
    op = operations.Vesting_balance_withdraw(
        **{
            "fee": {"amount": 0, "asset_id": "1.3.0"},
            "vesting_balance": vesting_id,
            "owner": account["id"],
            "amount": {"amount": int(amount), "asset_id": amount["asset"]["id"]},
            "prefix": self.prefix,
        }
    )
    # BUGFIX: forward **kwargs to finalizeOp -- the signature accepted
    # them but they were previously dropped here, unlike in every other
    # method of this class.
    return self.finalizeOp(op, account["name"], "active", **kwargs)
def publish_price_feed(
    self, symbol, settlement_price, cer=None, mssr=110, mcr=200, account=None
):
    """ Publish a price feed for a market-pegged asset

        :param str symbol: Symbol of the asset to publish feed for
        :param bitshares.price.Price settlement_price: Price for settlement
        :param bitshares.price.Price cer: Core exchange Rate (default
            ``settlement_price + 5%``)
        :param float mssr: Percentage for max short squeeze ratio (default:
            110%)
        :param float mcr: Percentage for maintenance collateral ratio
            (default: 200%)
        :param str account: (optional) the account to allow access
            to (defaults to ``default_account``)

        .. note:: The ``account`` needs to be allowed to produce a
                  price feed for ``symbol``. For witness produced
                  feeds this means ``account`` is a witness account!
    """
    # Both ratios are percentages; values <= 100% make no sense on-chain.
    assert mcr > 100
    assert mssr > 100
    assert isinstance(
        settlement_price, Price
    ), "settlement_price needs to be instance of `bitshares.price.Price`!"
    assert cer is None or isinstance(
        cer, Price
    ), "cer needs to be instance of `bitshares.price.Price`!"
    if not account:
        if "default_account" in self.config:
            account = self.config["default_account"]
    if not account:
        raise ValueError("You need to provide an account")
    account = Account(account, blockchain_instance=self)
    asset = Asset(symbol, blockchain_instance=self, full=True)
    backing_asset = asset["bitasset_data"]["options"]["short_backing_asset"]
    # The feed price must involve the asset itself ...
    assert (
        asset["id"] == settlement_price["base"]["asset"]["id"]
        or asset["id"] == settlement_price["quote"]["asset"]["id"]
    ), "Price needs to contain the asset of the symbol you'd like to produce a feed for!"
    assert asset.is_bitasset, "Symbol needs to be a bitasset!"
    # ... and be quoted against its backing collateral asset.
    assert (
        settlement_price["base"]["asset"]["id"] == backing_asset
        or settlement_price["quote"]["asset"]["id"] == backing_asset
    ), "The Price needs to be relative to the backing collateral!"
    settlement_price = settlement_price.as_base(symbol)

    if cer:
        cer = cer.as_base(symbol)
        if cer["quote"]["asset"]["id"] != "1.3.0":
            raise ValueError("CER must be defined against core asset '1.3.0'")
    else:
        # Derive a default CER 5% off the settlement price; only
        # possible when the feed is already quoted in the core asset.
        if settlement_price["quote"]["asset"]["id"] != "1.3.0":
            raise ValueError(
                "CER must be manually provided because it relates to core asset '1.3.0'"
            )
        cer = settlement_price.as_quote(symbol) * 0.95
    op = operations.Asset_publish_feed(
        **{
            "fee": {"amount": 0, "asset_id": "1.3.0"},
            "publisher": account["id"],
            "asset_id": asset["id"],
            "feed": {
                "settlement_price": settlement_price.as_base(symbol).json(),
                "core_exchange_rate": cer.as_base(symbol).json(),
                # Percent values are stored on-chain scaled by 10
                # (e.g. 110% -> 1100).
                "maximum_short_squeeze_ratio": int(mssr * 10),
                "maintenance_collateral_ratio": int(mcr * 10),
            },
            "prefix": self.prefix,
        }
    )
    return self.finalizeOp(op, account["name"], "active")
def update_cer(self, symbol, cer, account=None):
    """ Update the Core Exchange Rate (CER) of an asset

        :param str symbol: Symbol of the asset to update the CER for
        :param bitshares.price.Price cer: Core exchange Rate
        :param str account: (optional) the issuer account
            (defaults to ``default_account``)
    """
    assert isinstance(
        cer, Price
    ), "cer needs to be instance of `bitshares.price.Price`!"
    if not account:
        if "default_account" in self.config:
            account = self.config["default_account"]
    if not account:
        raise ValueError("You need to provide an account")
    account = Account(account, blockchain_instance=self)
    asset = Asset(symbol, blockchain_instance=self, full=True)
    # The price must involve the asset whose CER is being updated.
    assert (
        asset["id"] == cer["base"]["asset"]["id"]
        or asset["id"] == cer["quote"]["asset"]["id"]
    ), "Price needs to contain the asset of the symbol you'd like to produce a feed for!"
    # A CER is always quoted against the core asset ("1.3.0").
    cer = cer.as_base(symbol)
    if cer["quote"]["asset"]["id"] != "1.3.0":
        raise ValueError("CER must be defined against core asset '1.3.0'")
    # Keep all other asset options, only swap in the new CER.
    options = asset["options"]
    options.update({"core_exchange_rate": cer.as_base(symbol).json()})
    op = operations.Asset_update(
        **{
            "fee": {"amount": 0, "asset_id": "1.3.0"},
            "issuer": account["id"],
            "asset_to_update": asset["id"],
            "new_options": options,
            "extensions": [],
            "prefix": self.prefix,
        }
    )
    return self.finalizeOp(op, account["name"], "active")
def update_witness(self, witness_identifier, url=None, key=None, **kwargs):
    """ Update the URL and/or signing key of a witness

        :param str witness_identifier: Identifier for the witness
        :param str url: New URL for the witness
        :param str key: New public signing key for the witness
    """
    # CONSISTENCY FIX: look the witness up through *this* blockchain
    # instance -- every other object lookup in this class passes
    # blockchain_instance=self, but this one previously fell back to
    # the shared/default instance.
    witness = Witness(witness_identifier, blockchain_instance=self)
    account = witness.account
    op = operations.Witness_update(
        **{
            "fee": {"amount": 0, "asset_id": "1.3.0"},
            "prefix": self.prefix,
            "witness": witness["id"],
            "witness_account": account["id"],
            "new_url": url,
            "new_signing_key": key,
        }
    )
    return self.finalizeOp(op, account["name"], "active", **kwargs)
def reserve(self, amount, account=None, **kwargs):
    """ Reserve/Burn an amount of this shares

        This removes the shares from the supply

        :param bitshares.amount.Amount amount: The amount to be burned.
        :param str account: (optional) the account to allow access
            to (defaults to ``default_account``)
    """
    assert isinstance(amount, Amount)
    if not account and "default_account" in self.config:
        account = self.config["default_account"]
    if not account:
        raise ValueError("You need to provide an account")
    account = Account(account, blockchain_instance=self)
    reserve_data = {
        "fee": {"amount": 0, "asset_id": "1.3.0"},
        "payer": account["id"],
        "amount_to_reserve": {
            "amount": int(amount),
            "asset_id": amount["asset"]["id"],
        },
        "extensions": [],
    }
    return self.finalizeOp(
        operations.Asset_reserve(**reserve_data), account, "active", **kwargs
    )
def create_asset(
    self,
    symbol,
    precision,
    max_supply,
    description="",
    is_bitasset=False,
    is_prediction_market=False,
    market_fee_percent=0,
    max_market_fee=None,
    permissions=None,
    flags=None,
    whitelist_authorities=None,
    blacklist_authorities=None,
    whitelist_markets=None,
    blacklist_markets=None,
    bitasset_options=None,
    account=None,
    **kwargs
):
    """ Create a new asset

        :param str symbol: Asset symbol
        :param int precision: Asset precision
        :param int max_supply: Asset max supply
        :param str description: (optional) Asset description
        :param bool is_bitasset: (optional) True = bitasset, False = UIA (default:
            False)
        :param bool is_prediction_market: (optional) True: PD, False = plain
            smartcoin (default: False)
        :param float market_fee_percent: (optional) Charge market fee (0-100)
            (default: 0)
        :param float max_market_fee: (optional) Absolute amount of max
            market fee, value of this option should be a whole number (default:
            same as max_supply)
        :param dict permissions: (optional) Asset permissions (default: all
            permissions enabled)
        :param dict flags: (optional) Enabled asset flags (default: no
            flags enabled)
        :param list whitelist_authorities: (optional) List of accounts that
            serve as whitelist authorities
        :param list blacklist_authorities: (optional) List of accounts that
            serve as blacklist authorities
        :param list whitelist_markets: (optional) List of assets to allow
            trading with
        :param list blacklist_markets: (optional) List of assets to prevent
            trading with
        :param dict bitasset_options: (optional) Bitasset settings
        :param str account: (optional) the issuer account
            to (defaults to ``default_account``)
    """
    # BUGFIX: the dict/list option defaults used to be *mutable default
    # arguments*.  Because this method mutates ``permissions`` below for
    # non-bitassets, state leaked from one call into the defaults of the
    # next (and into caller-supplied dicts).  Build the defaults fresh
    # per call and copy caller dicts before mutating.
    if permissions is None:
        permissions = {
            "charge_market_fee": True,
            "white_list": True,
            "override_authority": True,
            "transfer_restricted": True,
            "disable_force_settle": True,
            "global_settle": True,
            "disable_confidential": True,
            "witness_fed_asset": True,
            "committee_fed_asset": True,
        }
    else:
        permissions = dict(permissions)  # never mutate the caller's dict
    if flags is None:
        flags = {
            "charge_market_fee": False,
            "white_list": False,
            "override_authority": False,
            "transfer_restricted": False,
            "disable_force_settle": False,
            "global_settle": False,
            "disable_confidential": False,
            "witness_fed_asset": False,
            "committee_fed_asset": False,
        }
    if whitelist_authorities is None:
        whitelist_authorities = []
    if blacklist_authorities is None:
        blacklist_authorities = []
    if whitelist_markets is None:
        whitelist_markets = []
    if blacklist_markets is None:
        blacklist_markets = []
    if bitasset_options is None:
        bitasset_options = {
            "feed_lifetime_sec": 86400,
            "minimum_feeds": 7,
            "force_settlement_delay_sec": 86400,
            "force_settlement_offset_percent": 100,
            "maximum_force_settlement_volume": 50,
            "short_backing_asset": "1.3.0",
            "extensions": [],
        }

    if not account:
        if "default_account" in self.config:
            account = self.config["default_account"]
    if not account:
        raise ValueError("You need to provide an account")
    account = Account(account, blockchain_instance=self)

    if not is_bitasset:
        # Turn off bitasset-specific options
        permissions["disable_force_settle"] = False
        permissions["global_settle"] = False
        permissions["witness_fed_asset"] = False
        permissions["committee_fed_asset"] = False
        bitasset_options = None

    assert set(permissions.keys()).issubset(
        asset_permissions.keys()
    ), "unknown permission"
    assert set(flags.keys()).issubset(asset_permissions.keys()), "unknown flag"
    # Transform permissions and flags into bitmask
    permissions_int = toint(permissions)
    flags_int = toint(flags)

    if not max_market_fee:
        max_market_fee = max_supply

    op = operations.Asset_create(
        **{
            "fee": {"amount": 0, "asset_id": "1.3.0"},
            "issuer": account["id"],
            "symbol": symbol,
            "precision": precision,
            "common_options": {
                "max_supply": int(max_supply * 10 ** precision),
                "market_fee_percent": int(market_fee_percent * 100),
                "max_market_fee": int(max_market_fee * 10 ** precision),
                "issuer_permissions": permissions_int,
                "flags": flags_int,
                # NOTE(review): placeholder CER quoting against asset id
                # "1.3.1" kept verbatim from the original -- confirm this
                # is the intended placeholder for the asset being created.
                "core_exchange_rate": {
                    "base": {"amount": 1, "asset_id": "1.3.0"},
                    "quote": {"amount": 1, "asset_id": "1.3.1"},
                },
                "whitelist_authorities": [
                    Account(a, blockchain_instance=self)["id"]
                    for a in whitelist_authorities
                ],
                "blacklist_authorities": [
                    Account(a, blockchain_instance=self)["id"]
                    for a in blacklist_authorities
                ],
                "whitelist_markets": [
                    Asset(a, blockchain_instance=self)["id"]
                    for a in whitelist_markets
                ],
                "blacklist_markets": [
                    Asset(a, blockchain_instance=self)["id"]
                    for a in blacklist_markets
                ],
                "description": description,
                "extensions": [],
            },
            "bitasset_opts": bitasset_options,
            "is_prediction_market": is_prediction_market,
            "extensions": [],
        }
    )

    return self.finalizeOp(op, account, "active", **kwargs)
def create_worker(
    self,
    name,
    daily_pay,
    end,
    url="",
    begin=None,
    payment_type="vesting",
    pay_vesting_period_days=0,
    account=None,
    **kwargs
):
    """ Create a worker

        **Required**

        :param str name: Name of the worker
        :param bitshares.amount.Amount daily_pay: The amount to be paid
            daily
        :param datetime end: Date/time of end of the worker

        **Optional**

        :param str url: URL to read more about the worker
        :param datetime begin: Date/time of begin of the worker
            (defaults to 30 seconds from now)
        :param string payment_type: ["burn", "refund", "vesting"] (default:
            "vesting")
        :param int pay_vesting_period_days: Days of vesting (default: 0)
        :param str account: (optional) the account to allow access
            to (defaults to ``default_account``)
    """
    # Canonical blockchain date format for serializing datetimes.
    from bitsharesbase.transactions import timeformat

    # Workers can only be paid in the core asset ("1.3.0").
    assert isinstance(daily_pay, Amount)
    assert daily_pay["asset"]["id"] == "1.3.0"
    if not begin:
        # Default start shortly in the future so the operation is still
        # in the future when it gets included in a block.
        begin = datetime.utcnow() + timedelta(seconds=30)
    if not account:
        if "default_account" in self.config:
            account = self.config["default_account"]
    if not account:
        raise ValueError("You need to provide an account")
    account = Account(account, blockchain_instance=self)

    # The initializer is a (type-index, parameters) pair selecting the
    # worker payout model: 0 = refund, 1 = vesting, 2 = burn.
    if payment_type == "refund":
        initializer = [0, {}]
    elif payment_type == "vesting":
        initializer = [1, {"pay_vesting_period_days": pay_vesting_period_days}]
    elif payment_type == "burn":
        initializer = [2, {}]
    else:
        raise ValueError('payment_type not in ["burn", "refund", "vesting"]')

    op = operations.Worker_create(
        **{
            "fee": {"amount": 0, "asset_id": "1.3.0"},
            "owner": account["id"],
            "work_begin_date": begin.strftime(timeformat),
            "work_end_date": end.strftime(timeformat),
            "daily_pay": int(daily_pay),
            "name": name,
            "url": url,
            "initializer": initializer,
        }
    )
    return self.finalizeOp(op, account, "active", **kwargs)
def fund_fee_pool(self, symbol, amount, account=None, **kwargs):
    """ Fund the fee pool of an asset

        :param float amount: The amount (in the core asset "1.3.0") to
            add to the fee pool
        :param str symbol: The symbol to fund the fee pool of
        :param str account: (optional) the account to allow access
            to (defaults to ``default_account``)
    """
    # GENERALIZATION: integers are perfectly valid amounts as well; the
    # previous check (`isinstance(amount, float)`) rejected them.
    assert isinstance(amount, (float, int))
    if not account:
        if "default_account" in self.config:
            account = self.config["default_account"]
    if not account:
        raise ValueError("You need to provide an account")
    # The fee pool is always funded in the core asset ("1.3.0").
    amount = Amount(amount, "1.3.0", blockchain_instance=self)
    account = Account(account, blockchain_instance=self)
    asset = Asset(symbol, blockchain_instance=self)
    op = operations.Asset_fund_fee_pool(
        **{
            "fee": {"amount": 0, "asset_id": "1.3.0"},
            "from_account": account["id"],
            "asset_id": asset["id"],
            "amount": int(amount),
            "extensions": [],
        }
    )
    return self.finalizeOp(op, account, "active", **kwargs)
def create_committee_member(self, url="", account=None, **kwargs):
    """ Create a committee member

        :param str url: URL to read more about the committee member
        :param str account: (optional) the account to register as
            committee member (defaults to ``default_account``)
    """
    if not account and "default_account" in self.config:
        account = self.config["default_account"]
    if not account:
        raise ValueError("You need to provide an account")
    account = Account(account, blockchain_instance=self)
    create_data = {
        "fee": {"amount": 0, "asset_id": "1.3.0"},
        "committee_member_account": account["id"],
        "url": url,
    }
    return self.finalizeOp(
        operations.Committee_member_create(**create_data), account, "active", **kwargs
    )
def account_whitelist(
    self,
    account_to_whitelist,
    lists=["white"],  # set of 'white' and/or 'black'
    account=None,
    **kwargs
):
    """ Account whitelisting

        :param str account_to_whitelist: The account we want to add
            to either the white- or the blacklist
        :param set lists: (defaults to ``('white')``). Lists the
            user should be added to. Either empty set, 'black',
            'white' or both.
        :param str account: (optional) the account to allow access
            to (defaults to ``default_account``)
    """
    if not account and "default_account" in self.config:
        account = self.config["default_account"]
    if not account:
        raise ValueError("You need to provide an account")
    account = Account(account, blockchain_instance=self)
    account_to_list = Account(account_to_whitelist, blockchain_instance=self)
    if not isinstance(lists, (set, list)):
        raise ValueError('"lists" must be of instance list()')

    # Compose the listing bitmask from the requested memberships.
    new_listing = operations.Account_whitelist.no_listing
    for membership, bit in (
        ("white", operations.Account_whitelist.white_listed),
        ("black", operations.Account_whitelist.black_listed),
    ):
        if membership in lists:
            new_listing += bit

    whitelist_data = {
        "fee": {"amount": 0, "asset_id": "1.3.0"},
        "authorizing_account": account["id"],
        "account_to_list": account_to_list["id"],
        "new_listing": new_listing,
    }
    return self.finalizeOp(
        operations.Account_whitelist(**whitelist_data), account, "active", **kwargs
    )
def bid_collateral(
    self, additional_collateral, debt_covered, account=None, **kwargs
):
    """ Submit a ``bid_collateral`` operation for the given amounts.

        :param bitshares.amount.Amount additional_collateral: Collateral
            offered in the bid
        :param bitshares.amount.Amount debt_covered: Debt the bid covers
        :param str account: (optional) the bidding account
            (defaults to ``default_account``)
    """
    if not account and "default_account" in self.config:
        account = self.config["default_account"]
    if not account:
        raise ValueError("You need to provide an account")
    account = Account(account, blockchain_instance=self)
    if not isinstance(additional_collateral, (Amount)):
        raise ValueError("additional_collateral must be of type Amount")
    if not isinstance(debt_covered, (Amount)):
        raise ValueError("debt_covered must be of type Amount")
    bid_data = {
        "fee": {"amount": 0, "asset_id": "1.3.0"},
        "bidder": account["id"],
        "additional_collateral": additional_collateral.json(),
        "debt_covered": debt_covered.json(),
        "extensions": [],
    }
    return self.finalizeOp(
        operations.Bid_collateral(**bid_data), account, "active", **kwargs
    )
def asset_settle(self, amount, account=None, **kwargs):
    """ Submit an ``asset_settle`` operation for the given amount.

        :param bitshares.amount.Amount amount: Amount to settle
        :param str account: (optional) the settling account
            (defaults to ``default_account``)
    """
    if not account and "default_account" in self.config:
        account = self.config["default_account"]
    if not account:
        raise ValueError("You need to provide an account")
    account = Account(account, blockchain_instance=self)
    if not isinstance(amount, (Amount)):
        raise ValueError("'amount' must be of type Amount")
    settle_data = {
        "fee": {"amount": 0, "asset_id": "1.3.0"},
        "account": account["id"],
        "amount": amount.json(),
        "extensions": [],
    }
    return self.finalizeOp(
        operations.Asset_settle(**settle_data), account, "active", **kwargs
    )
def htlc_create(
    self,
    amount,
    to,
    preimage,
    hash_type="ripemd160",
    account=None,
    expiration=60 * 60,
    **kwargs
):
    """ Create an HTLC contract locking ``amount`` for ``to`` behind
        the hash of ``preimage``.

        :param bitshares.amount.Amount amount: Amount to lock up
        :param str to: Recipient account
        :param str preimage: Secret whose hash locks the contract
        :param str hash_type: "ripemd160", "sha1" or "sha256"
        :param str account: (optional) the funding account
            (defaults to ``default_account``)
        :param int expiration: claim period in seconds (default: 1h)
    """
    import hashlib
    from binascii import hexlify

    from graphenebase.base58 import ripemd160

    if not account:
        if "default_account" in self.config:
            account = self.config["default_account"]
    if not account:
        raise ValueError("You need to provide an account")
    account = Account(account, blockchain_instance=self)
    to = Account(to, blockchain_instance=self)
    if not isinstance(amount, (Amount)):
        raise ValueError("'amount' must be of type Amount")
    # Hash (and later redeem) operate on the UTF-8 encoding of the
    # preimage, so encode it once up front.
    preimage_bytes = bytes(preimage, "utf-8")
    if hash_type == "ripemd160":
        preimage_type = 0
        # graphenebase.ripemd160 takes a hex string, hence the hexlify.
        preimage_hash = hexlify(ripemd160(hexlify(preimage_bytes))).decode("ascii")
    elif hash_type == "sha1":
        preimage_type = 1
        preimage_hash = hashlib.sha1(preimage_bytes).hexdigest()
    elif hash_type == "sha256":
        preimage_type = 2
        preimage_hash = hashlib.sha256(preimage_bytes).hexdigest()
    else:
        raise ValueError(
            "Unknown 'hash_type'. Must be 'sha1', 'sha256', or 'ripemd160'"
        )
    op = operations.Htlc_create(
        **{
            "fee": {"amount": 0, "asset_id": "1.3.0"},
            "from": account["id"],
            "to": to["id"],
            "amount": amount.json(),
            "preimage_hash": [preimage_type, preimage_hash],
            # BUGFIX: the size must be the *byte* length of the UTF-8
            # encoded preimage (what is hashed above and submitted on
            # redeem), not the character count -- the two differ for
            # non-ASCII preimages and a mismatch makes the HTLC
            # unredeemable.
            "preimage_size": len(preimage_bytes),
            "claim_period_seconds": expiration,
            "extensions": [],
        }
    )
    return self.finalizeOp(op, account, "active", **kwargs)
def htlc_redeem(self, htlc_id, preimage, account=None, **kwargs):
    """ Redeem an HTLC contract by revealing its preimage.

        :param str htlc_id: Id of the HTLC object
        :param str preimage: Secret that unlocks the contract
        :param str account: (optional) the redeeming account
            (defaults to ``default_account``, otherwise to the
            contract's recipient)
    """
    from binascii import hexlify

    htlc = Htlc(htlc_id)
    if not account and "default_account" in self.config:
        account = self.config["default_account"]
    if not account:
        # Fall back to the contract's recipient.
        account = htlc["to"]
    account = Account(account, blockchain_instance=self)
    redeem_data = {
        "fee": {"amount": 0, "asset_id": "1.3.0"},
        "redeemer": account["id"],
        # The chain expects the hex representation of the UTF-8 bytes.
        "preimage": hexlify(bytes(preimage, "utf-8")).decode("ascii"),
        "htlc_id": htlc["id"],
        "extensions": [],
    }
    return self.finalizeOp(
        operations.Htlc_redeem(**redeem_data), account, "active", **kwargs
    )
| 39.923418 | 123 | 0.557206 |
899896d648a247e9ce8ecab49b1b4dd37c7c9e6f | 389 | py | Python | djmemo/wsgi.py | xuelians/djmemo | ec64647121b59f76951a7e853e65089c12bf44e5 | [
"MIT"
] | null | null | null | djmemo/wsgi.py | xuelians/djmemo | ec64647121b59f76951a7e853e65089c12bf44e5 | [
"MIT"
] | null | null | null | djmemo/wsgi.py | xuelians/djmemo | ec64647121b59f76951a7e853e65089c12bf44e5 | [
"MIT"
] | null | null | null | """
WSGI config for djmemo project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "djmemo.settings")
application = get_wsgi_application()
| 22.882353 | 78 | 0.784062 |
d59b0e829dd723fb23d696f4a766fc2246d9ead7 | 3,279 | py | Python | mhxxinfo_server/mhxxinfo_server/settings.py | aiirohituzi/MHXXInfo | 941469b44e746ad97a03bf0dd5bfdea4cd96f5ae | [
"MIT"
] | null | null | null | mhxxinfo_server/mhxxinfo_server/settings.py | aiirohituzi/MHXXInfo | 941469b44e746ad97a03bf0dd5bfdea4cd96f5ae | [
"MIT"
] | 2 | 2018-02-01T13:54:32.000Z | 2018-03-31T14:02:40.000Z | mhxxinfo_server/mhxxinfo_server/settings.py | aiirohituzi/MHXXInfo | 941469b44e746ad97a03bf0dd5bfdea4cd96f5ae | [
"MIT"
] | null | null | null | """
Django settings for mhxxinfo_server project.
Generated by 'django-admin startproject' using Django 1.11.7.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '7&epa*lr5)6aaqcqlx%h=h)yhsb!b7(he^90ex(=jo#@(*6ui2'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'corsheaders',
'quest',
'kariwaza',
'requestQuest',
'skill',
]
MIDDLEWARE = [
'corsheaders.middleware.CorsMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mhxxinfo_server.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mhxxinfo_server.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
CORS_ORIGIN_ALLOW_ALL = True
| 25.418605 | 91 | 0.695944 |
57a60ededd976f696c3c0820301b6698679d6f66 | 4,037 | py | Python | var/spack/repos/builtin/packages/geant4-data/package.py | carlabguillen/spack | 7070bb892f9bdb5cf9e76e0eecd64f6cc5f4695c | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 9 | 2018-04-18T07:51:40.000Z | 2021-09-10T03:56:57.000Z | var/spack/repos/builtin/packages/geant4-data/package.py | carlabguillen/spack | 7070bb892f9bdb5cf9e76e0eecd64f6cc5f4695c | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 907 | 2018-04-18T11:17:57.000Z | 2022-03-31T13:20:25.000Z | var/spack/repos/builtin/packages/geant4-data/package.py | carlabguillen/spack | 7070bb892f9bdb5cf9e76e0eecd64f6cc5f4695c | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 29 | 2018-11-05T16:14:23.000Z | 2022-02-03T16:07:09.000Z | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import os
import glob
class Geant4Data(BundlePackage):
"""A bundle package to hold Geant4 data packages"""
homepage = "http://geant4.cern.ch"
maintainers = ['drbenmorgan']
version('10.6.2')
version('10.6.1')
version('10.6.0')
version('10.5.1')
version('10.4.3')
version('10.4.0')
version('10.3.3')
# Add install phase so we can create the data "view"
phases = ['install']
# For clarity, declare deps on a Major-Minor version basis as
# they generally don't change on the patch level
# Can move to declaring on a dataset basis if needed
# geant4@10.6.X
depends_on("g4ndl@4.6", when='@10.6.0:10.6.9999')
depends_on("g4emlow@7.9", when='@10.6.0')
depends_on("g4emlow@7.9.1", when='@10.6.1:10.6.9999')
depends_on("g4photonevaporation@5.5", when='@10.6.0:10.6.9999')
depends_on("g4radioactivedecay@5.4", when='@10.6.0:10.6.9999')
depends_on("g4particlexs@2.1", when='@10.6.0:10.6.9999')
depends_on("g4pii@1.3", when='@10.6.0:10.6.9999')
depends_on("g4realsurface@2.1.1", when='@10.6.0:10.6.9999')
depends_on("g4saiddata@2.0", when='@10.6.0:10.6.9999')
depends_on("g4abla@3.1", when='@10.6.0:10.6.9999')
depends_on("g4incl@1.0", when='@10.6.0:10.6.9999')
depends_on("g4ensdfstate@2.2", when='@10.6.0:10.6.9999')
# geant4@10.5.X
depends_on("g4ndl@4.5", when='@10.5.0:10.5.9999')
depends_on("g4emlow@7.7", when='@10.5.0:10.5.9999')
depends_on("g4photonevaporation@5.3", when='@10.5.0:10.5.9999')
depends_on("g4radioactivedecay@5.3", when='@10.5.0:10.5.9999')
depends_on("g4particlexs@1.1", when='@10.5.0:10.5.9999')
depends_on("g4pii@1.3", when='@10.5.0:10.5.9999')
depends_on("g4realsurface@2.1.1", when='@10.5.0:10.5.9999')
depends_on("g4saiddata@2.0", when='@10.5.0:10.5.9999')
depends_on("g4abla@3.1", when='@10.5.0:10.5.9999')
depends_on("g4incl@1.0", when='@10.5.0:10.5.9999')
depends_on("g4ensdfstate@2.2", when='@10.5.0:10.5.9999')
# geant4@10.4.X
depends_on("g4ndl@4.5", when='@10.4.0:10.4.9999')
depends_on("g4emlow@7.3", when='@10.4.0:10.4.9999')
depends_on("g4photonevaporation@5.2", when='@10.4.0:10.4.9999')
depends_on("g4radioactivedecay@5.2", when='@10.4.0:10.4.9999')
depends_on("g4neutronxs@1.4", when='@10.4.0:10.4.9999')
depends_on("g4pii@1.3", when='@10.4.0:10.4.9999')
depends_on("g4realsurface@2.1.1", when='@10.4.2:10.4.9999')
depends_on("g4realsurface@2.1", when='@10.4.0:10.4.1')
depends_on("g4saiddata@1.1", when='@10.4.0:10.4.9999')
depends_on("g4abla@3.1", when='@10.4.0:10.4.9999')
depends_on("g4ensdfstate@2.2", when='@10.4.0:10.4.9999')
# geant4@10.3.X
depends_on("g4ndl@4.5", when='@10.3.0:10.3.9999')
depends_on("g4emlow@6.50", when='@10.3.0:10.3.9999')
depends_on("g4photonevaporation@4.3.2", when='@10.3.1:10.3.9999')
depends_on("g4photonevaporation@4.3", when='@10.3.0')
depends_on("g4radioactivedecay@5.1.1", when='@10.3.1:10.3.9999')
depends_on("g4radioactivedecay@5.1", when='@10.3.0')
depends_on("g4neutronxs@1.4", when='@10.3.0:10.3.9999')
depends_on("g4pii@1.3", when='@10.3.0:10.3.9999')
depends_on("g4realsurface@1.0", when='@10.3.0:10.3.9999')
depends_on("g4saiddata@1.1", when='@10.3.0:10.3.9999')
depends_on("g4abla@3.0", when='@10.3.0:10.3.9999')
depends_on("g4ensdfstate@2.1", when='@10.3.0:10.3.9999')
def install(self, spec, prefix):
    """Aggregate all Geant4 datasets under one shared directory.

    Creates ``share/<name>-<dotted-version>`` inside this package's prefix
    and symlinks every file from each dataset dependency's ``share/data``
    directory into it, so Geant4 finds all physics datasets in one place.

    Defect fixed here: the serialized copy of this method had lost all
    indentation; structure restored to valid Python.
    """
    spec = self.spec
    # Directory name follows the Geant4 convention: <package>-<dotted version>.
    data = '{0}-{1}'.format(self.name, self.version.dotted)
    datadir = join_path(spec.prefix.share, data)
    with working_dir(datadir, create=True):
        for s in spec.dependencies():
            for d in glob.glob('{0}/data/*'.format(s.prefix.share)):
                # Link by basename so the aggregated directory mirrors
                # each dataset's own top-level layout.
                os.symlink(d, os.path.basename(d))
| 40.37 | 73 | 0.628932 |
7704ab71c510a429b218714c9448026f93290385 | 3,003 | py | Python | test/test_eds.py | Jiongyu/canopen | b638e6251a9aca021e2b46144ece6c05beccc814 | [
"MIT"
] | 1 | 2019-08-24T17:57:22.000Z | 2019-08-24T17:57:22.000Z | test/test_eds.py | tabjulbharath/canopen | 264ff4fe3ba02019c9706ca5625c38abd698494a | [
"MIT"
] | null | null | null | test/test_eds.py | tabjulbharath/canopen | 264ff4fe3ba02019c9706ca5625c38abd698494a | [
"MIT"
] | 1 | 2021-03-20T14:59:03.000Z | 2021-03-20T14:59:03.000Z | import os
import unittest
import canopen
EDS_PATH = os.path.join(os.path.dirname(__file__), 'sample.eds')
class TestEDS(unittest.TestCase):
    """Unit tests for EDS object-dictionary import via ``canopen.import_od``.

    Defect fixed: the serialized copy of this class had all indentation
    stripped (invalid Python); structure restored.
    """

    def setUp(self):
        # Re-parse the sample EDS for node id 2 before every test so tests
        # cannot contaminate each other through a shared object dictionary.
        self.od = canopen.import_od(EDS_PATH, 2)

    def test_load_nonexisting_file(self):
        with self.assertRaises(IOError):
            canopen.import_od('/path/to/wrong_file.eds')

    def test_load_file_object(self):
        # import_od must also accept an already-open file object.
        od = canopen.import_od(open(EDS_PATH))
        self.assertTrue(len(od) > 0)

    def test_variable(self):
        var = self.od['Producer heartbeat time']
        self.assertIsInstance(var, canopen.objectdictionary.Variable)
        self.assertEqual(var.index, 0x1017)
        self.assertEqual(var.subindex, 0)
        self.assertEqual(var.name, 'Producer heartbeat time')
        self.assertEqual(var.data_type, canopen.objectdictionary.UNSIGNED16)
        self.assertEqual(var.access_type, 'rw')
        self.assertEqual(var.default, 0)

    def test_record(self):
        record = self.od['Identity object']
        self.assertIsInstance(record, canopen.objectdictionary.Record)
        self.assertEqual(len(record), 5)
        self.assertEqual(record.index, 0x1018)
        self.assertEqual(record.name, 'Identity object')
        var = record['Vendor-ID']
        self.assertIsInstance(var, canopen.objectdictionary.Variable)
        self.assertEqual(var.name, 'Vendor-ID')
        self.assertEqual(var.index, 0x1018)
        self.assertEqual(var.subindex, 1)
        self.assertEqual(var.data_type, canopen.objectdictionary.UNSIGNED32)
        self.assertEqual(var.access_type, 'ro')

    def test_array_compact_subobj(self):
        # 0x1003 uses the "CompactSubObj" EDS shorthand; entries get
        # auto-generated names suffixed with the subindex.
        array = self.od[0x1003]
        self.assertIsInstance(array, canopen.objectdictionary.Array)
        self.assertEqual(array.index, 0x1003)
        self.assertEqual(array.name, 'Pre-defined error field')
        var = array[5]
        self.assertIsInstance(var, canopen.objectdictionary.Variable)
        self.assertEqual(var.name, 'Pre-defined error field_5')
        self.assertEqual(var.index, 0x1003)
        self.assertEqual(var.subindex, 5)
        self.assertEqual(var.data_type, canopen.objectdictionary.UNSIGNED32)
        self.assertEqual(var.access_type, 'ro')

    def test_explicit_name_subobj(self):
        name = self.od[0x3004].name
        self.assertEqual(name, 'Sensor Status')
        name = self.od[0x3004][1].name
        self.assertEqual(name, 'Sensor Status 1')
        name = self.od[0x3004][3].name
        self.assertEqual(name, 'Sensor Status 3')
        value = self.od[0x3004][3].default
        self.assertEqual(value, 3)

    def test_parameter_name_with_percent(self):
        # '%' in ParameterName must survive ConfigParser interpolation.
        name = self.od[0x3003].name
        self.assertEqual(name, 'Valve % open')

    def test_compact_subobj_parameter_name_with_percent(self):
        name = self.od[0x3006].name
        self.assertEqual(name, 'Valve 1 % Open')

    def test_sub_index_w_capital_s(self):
        # EDS files may spell the section key 'SubIndex' with a capital S.
        name = self.od[0x3010][0].name
        self.assertEqual(name, 'Temperature')
| 38.012658 | 76 | 0.675991 |
fe26089b4c36540d6299b4633dce5fd093cb8cd8 | 5,487 | py | Python | ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/manager_c6443684a3a8badebe021783a46d1b90.py | OpenIxia/ixnetwork_restpy | f628db450573a104f327cf3c737ca25586e067ae | [
"MIT"
] | 20 | 2019-05-07T01:59:14.000Z | 2022-02-11T05:24:47.000Z | ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/manager_c6443684a3a8badebe021783a46d1b90.py | OpenIxia/ixnetwork_restpy | f628db450573a104f327cf3c737ca25586e067ae | [
"MIT"
] | 60 | 2019-04-03T18:59:35.000Z | 2022-02-22T12:05:05.000Z | ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/manager_c6443684a3a8badebe021783a46d1b90.py | OpenIxia/ixnetwork_restpy | f628db450573a104f327cf3c737ca25586e067ae | [
"MIT"
] | 13 | 2019-05-20T10:48:31.000Z | 2021-10-06T07:45:44.000Z | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
from typing import List, Any, Union
class Manager(Base):
    """Managers to which the OVSDB will initiate connections to.
    The Manager class encapsulates a required manager resource which will be retrieved from the server every time the property is accessed.

    Defect fixed: serialized copy had all indentation stripped (invalid
    Python); the generated-template structure has been restored.
    """

    __slots__ = ()
    _SDM_NAME = 'manager'
    # Maps pythonic attribute names to the server-side JSON attribute names.
    _SDM_ATT_MAP = {
        'Count': 'count',
        'DescriptiveName': 'descriptiveName',
        'ManagerActive': 'managerActive',
        'ManagerIp': 'managerIp',
        'Name': 'name',
        'TcpPort': 'tcpPort',
    }
    _SDM_ENUM_MAP = {
    }

    def __init__(self, parent, list_op=False):
        super(Manager, self).__init__(parent, list_op)

    @property
    def Count(self):
        # type: () -> int
        """
        Returns
        -------
        - number: Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group.
        """
        return self._get_attribute(self._SDM_ATT_MAP['Count'])

    @property
    def DescriptiveName(self):
        # type: () -> str
        """
        Returns
        -------
        - str: Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but may offer more context.
        """
        return self._get_attribute(self._SDM_ATT_MAP['DescriptiveName'])

    @property
    def ManagerActive(self):
        # type: () -> 'Multivalue'
        """
        Returns
        -------
        - obj(ixnetwork_restpy.multivalue.Multivalue): Enable Manager
        """
        # Local import avoids a circular dependency between Base and Multivalue.
        from ixnetwork_restpy.multivalue import Multivalue
        return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ManagerActive']))

    @property
    def ManagerIp(self):
        # type: () -> 'Multivalue'
        """
        Returns
        -------
        - obj(ixnetwork_restpy.multivalue.Multivalue): IPv4 Address of the Manager
        """
        from ixnetwork_restpy.multivalue import Multivalue
        return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ManagerIp']))

    @property
    def Name(self):
        # type: () -> str
        """
        Returns
        -------
        - str: Name of NGPF element, guaranteed to be unique in Scenario
        """
        return self._get_attribute(self._SDM_ATT_MAP['Name'])

    @Name.setter
    def Name(self, value):
        # type: (str) -> None
        self._set_attribute(self._SDM_ATT_MAP['Name'], value)

    @property
    def TcpPort(self):
        # type: () -> 'Multivalue'
        """
        Returns
        -------
        - obj(ixnetwork_restpy.multivalue.Multivalue): Specify the TCP port for the Manager
        """
        from ixnetwork_restpy.multivalue import Multivalue
        return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['TcpPort']))

    def update(self, Name=None):
        # type: (str) -> Manager
        """Updates manager resource on the server.

        This method has some named parameters with a type: obj (Multivalue).
        The Multivalue class has documentation that details the possible values for those named parameters.

        Args
        ----
        - Name (str): Name of NGPF element, guaranteed to be unique in Scenario

        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))

    def get_device_ids(self, PortNames=None, ManagerActive=None, ManagerIp=None, TcpPort=None):
        """Base class infrastructure that gets a list of manager device ids encapsulated by this object.

        Use the optional regex parameters in the method to refine the list of device ids encapsulated by this object.

        Args
        ----
        - PortNames (str): optional regex of port names
        - ManagerActive (str): optional regex of managerActive
        - ManagerIp (str): optional regex of managerIp
        - TcpPort (str): optional regex of tcpPort

        Returns
        -------
        - list(int): A list of device ids that meets the regex criteria provided in the method parameters

        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        return self._get_ngpf_device_ids(locals())
| 35.862745 | 139 | 0.651176 |
67a3214c647bd5a82e6b50ad5678a2440aa4a3ff | 7,675 | py | Python | udinaturen.py | ttytyper/udinaturendk-client | 1f3fdce69e6ff507dfde2a396173267561fda467 | [
"Unlicense"
] | 1 | 2020-02-26T16:56:36.000Z | 2020-02-26T16:56:36.000Z | udinaturen.py | ttytyper/udinaturendk-client | 1f3fdce69e6ff507dfde2a396173267561fda467 | [
"Unlicense"
] | null | null | null | udinaturen.py | ttytyper/udinaturendk-client | 1f3fdce69e6ff507dfde2a396173267561fda467 | [
"Unlicense"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# For printing to stderr
from __future__ import print_function
import sys
def eprint(*args, **kwargs):
    """print() that writes to stderr instead of stdout.

    Accepts exactly the same positional and keyword arguments as the
    builtin print(); used for progress/diagnostic messages so that the
    GPX/JSON payload on stdout stays machine-readable.
    """
    print(*args, file=sys.stderr, **kwargs)
import urllib
import json
import time
import utm
# https://pypi.org/project/gpxpy/
import gpxpy
import gpxpy.gpx
import HTMLParser
import sys
class Udinaturen:
    """Client for the udinaturen.dk REST API.

    Info: https://naturstyrelsen.dk/udinaturen/om-udinaturen/
    Self documenting API starts here:
    https://admin.udinaturen.dk/api/v1/?format=json

    Defect fixed: serialized copy had all indentation stripped (invalid
    Python); structure restored. Note this module targets Python 2
    (``urllib.urlopen``), kept as-is for compatibility.
    """

    root = 'https://admin.udinaturen.dk/'
    # Delay between requests, to avoid overloading the servers.
    # TODO: This only introduces delays when turning pages within the same
    # getAllObjects call, not across separate calls. Fix that.
    requestDelay = 5
    # NOTE(review): class-level mutable cache shared by ALL instances (and
    # subclasses) — presumably intended as a process-wide cache; verify.
    subCategories = {}  # Cache subCategories

    def __init__(self, limit=1000):
        # How many objects to fetch per request. The website defaults and
        # limits to 1000.
        self.limit = limit

    def getAllObjects(self, url):
        """Follow the API's ``meta.next`` pagination links starting at
        *url* and return the concatenated list of all ``objects``."""
        next = url
        objects = []
        while next != None:
            eprint("Loading from: %s" % next)
            page = (json.load(urllib.urlopen(next)))
            if "meta" in page and "next" in page["meta"] and page["meta"]["next"] != None:
                # 'next' is a relative URI; resolve against the API root
                # and throttle before the next request.
                next = self.root + page["meta"]["next"]
                time.sleep(self.requestDelay)
            else:
                next = None
            # Append to the data we've got so far.
            objects = list(objects + page["objects"])
        return objects

    def getSubcategories(self):
        """Return (and lazily cache) the full subcategory list."""
        if len(self.subCategories) == 0:
            self.subCategories = self.getAllObjects(self.root + '/api/v1/subcategory/?format=json')
        return self.subCategories
class Facilities(Udinaturen):
    """Collects facility records from the API and renders them as GPX.

    Defect fixed: serialized copy had all indentation stripped (invalid
    Python); structure restored from the code's own control-flow comments.
    """

    # NOTE(review): class-level mutable list — shared across instances;
    # looks intentional for a single-shot script, but verify before reuse.
    facilities = []

    def getFrom(self, index, name):
        """Fetch facilities for every subcategory whose *index* field
        (e.g. 'name' or 'maincategory_name') equals *name*."""
        for sid in [sub["id"] for sub in self.getSubcategories() if self.prettyText(sub[index]) == name]:
            facilities = self.getFacilities(subCategoryID=sid)
            # TODO: Deduplicate
            self.facilities = list(self.facilities + facilities)

    def getFromMainCategory(self, name):
        self.getFrom(index="maincategory_name", name=name)

    def getFromSubCategory(self, name):
        self.getFrom(index="name", name=name)

    def getFacilities(self, subCategoryID=None):
        """Fetch all facility objects, optionally filtered by subcategory id."""
        if subCategoryID == None:
            subCategoryID = ""
        return self.getAllObjects(url=self.root + '/api/v1/facilityread/?format=json&limit=' + str(self.limit) + '&subcategory=' + str(subCategoryID))

    def GPX(self, newLine='\n'):
        """Render the collected facilities as a GPX document (string)."""
        # How to do line breaks.
        # Some Garmin units prefer "\n", others want "<br />". Viking likes
        # "\n", just not when editing waypoints.
        br = newLine
        gpx = gpxpy.gpx.GPX()
        # Prefix for names, to make them stand out from other POIs on a GPS
        # device.
        namePrefix = "Ud"  # Ud(inaturen)
        # https://freegeographytools.com/2008/garmin-gps-unit-waypoint-icons-table
        subCategorySymbols = {
            u"Frit teltningsområde": "Forest",
            u"Stor lejrplads": "Picnic Area",
            u"Lille lejrplads": "Campground",
            u"Drikkevandspost": "Drinking Water",
            u"Toilet": "Restroom"
        }
        attributeFlags = {
            u"Drikkevand": "V",
            u"Shelter": "S"
        }
        for f in self.facilities:
            description = ""
            # Extract and convert coordinates.
            # Looks like coordinates can be listed as Point or Polygon, in
            # either of these fields.
            coord = []
            for field in ["the_geom", "the_geom2"]:
                if f[field]["type"] == "Point":
                    coord = f[field]["coordinates"]
                    break
            # Coordinates are UTM zone 32T; convert to lat/lon for GPX.
            latlon = utm.to_latlon(coord[0], coord[1], 32, 'T')
            # Apply appropriate symbols (icons) based on subcategory.
            if f["subcategoryname"] in subCategorySymbols:
                symbol = subCategorySymbols[f["subcategoryname"]]
            else:
                symbol = None
            # Apply descriptions.
            description += br
            if len(self.prettyText(f["shortdescription"])) > 0:
                # No need to show both short and long description if they're
                # identical.
                if self.prettyText(f["shortdescription"]) != self.prettyText(f["longdescription"]):
                    description += self.prettyText(f["shortdescription"]) + br
            if len(self.prettyText(f["longdescription"])) > 0:
                description += self.prettyText(f["longdescription"]) + br
            # Contact info.
            description += br + "Kontaktinfo:" + br
            description += "- Navn: " + self.prettyText(f["organisation"]["name"]) + br
            description += "- Telefon: " + self.prettyText(f["organisation"]["telephone"]) + br
            description += "- Email: " + self.prettyText(f["organisation"]["email"]) + br
            description += "- Link: " + self.prettyText(f["organisation"]["url"]) + br
            # List attributes.
            if len(f["attributes"]) > 0:
                description += br
                description += "Attributter:" + br
                description += br.join(['- ' + self.prettyText(a["attributename"]) for a in f["attributes"]])
            # Some attributes are so useful that an indicator in the name
            # makes it easier to pick out interesting facilities from a list
            # of their names.
            flags = ""
            for a in f["attributes"]:
                s = self.prettyText(a["attributename"])
                if s in attributeFlags:
                    flags += attributeFlags[s]
            if f["subcategory"]["webbooking"]:
                flags += 'B'  # Booking
            if len(flags) > 0:
                flags = ' ' + flags  # Space prefix
            # Direct link to the facility.
            description += br + br
            description += self.root + f["resource_uri"] + br
            waypoint = gpxpy.gpx.GPXWaypoint(
                latitude=latlon[0],
                longitude=latlon[1],
                name=namePrefix + flags + ': ' + self.prettyText(f["name"]),
                symbol=symbol,
                # Note: Garmin units only shows comments, not description
                comment=description,
                description=None
            )
            gpx.waypoints.append(waypoint)
        return gpx.to_xml()

    def json(self):
        """Return the raw facility records (list of dicts)."""
        return self.facilities

    def prettyText(self, text):
        """Normalise API text fields into plain human-readable text.

        Many of the fields returned by the servers have odd formatting, such
        as a ton of trailing spaces. This removes leading, trailing and
        consecutive whitespace (tabs, line breaks, etc.) and also tries to
        turn the HTML into human readable text.
        """
        # TODO: Make better use of HTMLParser instead of this unmaintainable
        # bundle of duct tape.
        # TODO: Newlines are hardcoded as \n. Don't do that.
        html = HTMLParser.HTMLParser()
        return (html.unescape(" ".join(text.split()).replace('<p>', '\n').replace('</p>', '').replace('<br>', '\n').replace('<br />', '\n')))
facilities = Facilities()
# Hard-coded examples, kept for reference:
#facilities.getFromSubCategory(u'Lille lejrplads')
#facilities.getFromSubCategory(u'Stor lejrplads')
#facilities.getFromSubCategory(u'Frit teltningsområde')
#facilities.getFromMainCategory(u'Overnatning')
#facilities.getFromSubCategory(u'Drikkevandspost')
#facilities.getFromSubCategory(u'Toilet')
# Take main and subcategory names from passed arguments.
# Bug fix: iterate sys.argv[1:] — the original iterated all of sys.argv,
# which includes argv[0] (the script path) and therefore issued pointless
# category queries named after the script itself.
for arg in sys.argv[1:]:
    arg = arg.decode('utf-8')  # Python 2: argv entries are byte strings.
    facilities.getFromMainCategory(arg)
    facilities.getFromSubCategory(arg)
#print(json.dumps(facilities.json()))
print(facilities.GPX())
eprint("Fetched %s facilities" % len(facilities.json()))
| 38.762626 | 144 | 0.607948 |
1c33f61ef94624e19a7f0a90cef13310a305cb70 | 9,181 | py | Python | src/nsvqa/nn/interpreter/batch_base_interpreter.py | drewhayward/DFOL-VQA | 8c7d403bac560588ab3ac45774a3e4f71fbe9c90 | [
"MIT"
] | 23 | 2020-08-17T16:18:33.000Z | 2022-03-09T11:47:37.000Z | src/nsvqa/nn/interpreter/batch_base_interpreter.py | drewhayward/DFOL-VQA | 8c7d403bac560588ab3ac45774a3e4f71fbe9c90 | [
"MIT"
] | 1 | 2021-06-11T15:51:24.000Z | 2021-06-11T15:51:24.000Z | src/nsvqa/nn/interpreter/batch_base_interpreter.py | drewhayward/DFOL-VQA | 8c7d403bac560588ab3ac45774a3e4f71fbe9c90 | [
"MIT"
] | 7 | 2020-11-09T07:25:27.000Z | 2022-01-13T04:25:09.000Z | # Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE.md file
# in the project root for full license information.
import torch
import torch.nn as nn
import os
from operator import itemgetter
from nsvqa.nn.interpreter import util
from nsvqa.nn.interpreter.batch_base_types import BatchWorld, BatchVariableSet, BatchAttentionState
from nsvqa.nn.interpreter.data_parallel import gather_results
# Abstract base for a batched neural program interpreter (DFOL-VQA):
# builds a BatchWorld from visual features, then runs each program's
# operator batches through _transform_attention / _execute hooks that
# subclasses implement.
class BatchInterpreterBase(nn.Module):
# Stores the oracle/featurizer and optional calibrator / visual rule
# learner; the attention-transfer modulator is currently disabled
# (self._has_modulator is hard-coded False below).
def __init__(self, name, oracle, featurizer=None, attention_transfer_state_dim=0, apply_modulation_everywhere=True, cached=False, visual_rule_learner=None, calibrator=None): #, attention_transfer_modulator=None):
super(BatchInterpreterBase, self).__init__()
self._featurizer = featurizer
self._oracle = oracle
self._name = name
# Non-trainable counter registered as a Parameter so it is saved/loaded
# with the state dict.
self._global_step = nn.Parameter(torch.tensor([0], dtype=torch.float), requires_grad=False)
# self._atm = attention_transfer_modulator
self._has_modulator = False
self._attention_transfer_state_dim = attention_transfer_state_dim
self._apply_modulation_everywhere = apply_modulation_everywhere
self._cached = cached
self._visual_rule_learner = visual_rule_learner
self._calibrator = calibrator
# Hook: execute one operator batch; must be overridden by subclasses.
def _execute(self, op_id, world, operator_batch, input_tuple, is_terminal, is_training):
pass
# Hook: forward/backward attention-state transform; must be overridden.
def _transform_attention(self, op_id, is_forward, world, operator_batch, input_tuple, is_terminal, is_training):
pass
# Number of trainable parameters in the module.
def parameter_count(self):
return sum(p.numel() for p in self.parameters() if p.requires_grad)
# Persist the full state dict under <export_path_base>/<name>.
def save(self, export_path_base):
torch.save(self.state_dict(), os.path.join(export_path_base, self._name))
# Load a state dict saved by save(); strict=False tolerates missing keys.
def load(self, import_path_base):
self.load_state_dict(torch.load(os.path.join(import_path_base, self._name)), strict=False)
# Build a BatchWorld of attribute/relation features for a scene batch.
# With a featurizer, features come from featurize_scene (optionally
# pre-scored by the oracle when cached, then calibrated / rule-learned);
# without one, relation features are all ordered object-feature pairs.
def build_scene(self, device, object_features, batch_index, meta_data):
if self._featurizer is not None:
features = self._featurizer.featurize_scene(device, object_features, batch_index, meta_data)
attribute_features = features['attribute_features']
relation_features = features['relation_features']
object_num = features['object_num']
if self._cached:
attribute_features, relation_features['features'] = self._oracle.compute_all_log_likelihood_2(attribute_features, relation_features['features'])
if self._calibrator is not None:
attribute_features[:, self._oracle._ontology._attribute_index], relation_features = self._calibrator(attribute_features[:, self._oracle._ontology._attribute_index], relation_features)
if self._visual_rule_learner is not None:
relation_features['object_num'] = object_num
attribute_features[:, self._oracle._ontology._attribute_index], relation_features = self._visual_rule_learner(attribute_features[:, self._oracle._ontology._attribute_index], relation_features)
else:
object_num = object_features.size()[0]
attribute_features = object_features.view(object_num, -1)
arg1 = attribute_features.repeat(1, object_num).view(object_num**2, -1)
# NOTE(review): 'self._object_num' here looks like it should be the
# local 'object_num' (self._object_num is never assigned in this
# class) — would raise AttributeError on the no-featurizer path;
# confirm against subclasses before changing.
arg2 = attribute_features.repeat(self._object_num, 1)
relation_features = torch.cat([arg1, arg2], dim=1)
return BatchWorld(device, object_num, attribute_features, relation_features, batch_index, meta_data, \
attention_transfer_state_dim=self._attention_transfer_state_dim).to(object_features.dtype)
# Interpret each program batch against its scene and gather the results.
# Per program batch: (1) optional modulator forward/backward passes over
# the operator list (dead code while _has_modulator is False), then
# (2) the execution loop that calls _execute per operator, threading
# results through 'trace' according to the dependency graph.
def forward(self, program_batch_list, is_training, return_trace=False, modulator_switch=True):
# Initialize the trace
all_traces = []
all_results = []
device = program_batch_list[0].device
# Main loop
for program_batch in program_batch_list:
# Set the objects features
world = self.build_scene(program_batch.device, program_batch._object_features, program_batch._object_batch_index, program_batch._meta_data)
# print('---------------------------------------------')
# Modulator loops
if self._has_modulator and modulator_switch:
if not self._apply_modulation_everywhere:
for i in range(len(program_batch._op_batch_list) - 1):
program_batch._op_batch_list._op_id += 'n'
# Forward loop
trace = []
for i, op_batch in enumerate(program_batch._op_batch_list):
# input_tuple is built from earlier trace entries per the
# program's dependency lists.
if len(program_batch._dependencies[i]) > 1:
input_tuple = tuple(itemgetter(*program_batch._dependencies[i])(trace))
elif len(program_batch._dependencies[i]) == 1:
input_tuple = (trace[program_batch._dependencies[i][0]],)
else:
input_tuple = (None,)
x, terminate = self._transform_attention(op_batch._op_id, True, world, op_batch, input_tuple, i == len(program_batch._op_batch_list) - 1, is_training)
# Gate the unaffected questions
if i < len(program_batch._op_batch_list) - 1 and input_tuple[0] is not None and op_batch._mask is not None:
x = x.gate(input_tuple[0], op_batch._mask)
trace.append(x)
if terminate:
break
# Backward loop
reversed_dependencies = util.reverse_dependencies(program_batch._dependencies)
first_attention_state = (BatchAttentionState(trace[-1]._name, device, trace[-1]._state, set_zeros=True).to(world.dtype), ) if not isinstance(trace[-1], (tuple, list)) else \
tuple([BatchAttentionState(att._name, device, att._state, set_zeros=True).to(world.dtype) for att in trace[-1]])
trace = [None for _ in range(len(program_batch._op_batch_list))]
for i, op_batch in reversed(list(enumerate(program_batch._op_batch_list))):
if len(reversed_dependencies[i]) == 1:
temp = trace[reversed_dependencies[i][0]]
if isinstance(temp, (tuple, list)):
input_tuple = (temp[1],) if i == len(program_batch._op_batch_list) - 2 else (temp[0],)
else:
input_tuple = (temp,)
else:
input_tuple = first_attention_state
x, terminate = self._transform_attention(op_batch._op_id, False, world, op_batch, input_tuple, i == 0, is_training)
# Gate the unaffected questions
# print(op_batch._op_name)
if len(program_batch._dependencies[i]) > 0 and op_batch._mask is not None and isinstance(x, BatchAttentionState) and i != len(program_batch._op_batch_list) - 1:
x = x.gate(input_tuple[0], op_batch._mask)
trace[i] = x
if terminate:
break
# if self._atm is not None:
# attention_transfer = self._atm(program_batch)
# Execution loop
trace = []
for i, op_batch in enumerate(program_batch._op_batch_list):
# print(op_batch._op_name)
if len(program_batch._dependencies[i]) > 1:
input_tuple = tuple(itemgetter(*program_batch._dependencies[i])(trace))
elif len(program_batch._dependencies[i]) == 1:
input_tuple = (trace[program_batch._dependencies[i][0]],)
else:
input_tuple = ()
x, terminate = self._execute(op_batch._op_id, world, op_batch, input_tuple, i == len(program_batch._op_batch_list) - 1, is_training)
# # Apply the transfer function if available
# if self._atm is not None and isinstance(x, BatchVariableSet):
# alpha = attention_transfer[i, :, 0].unsqueeze(1)
# beta = attention_transfer[i, :, 1].unsqueeze(1)
# temp = alpha * x._log_attention
# x._log_attention = temp - util.safe_log((beta * util.log_not(x._log_attention)).exp() + temp.exp())
# Gate the unaffected questions
if isinstance(x, BatchVariableSet) and len(input_tuple) > 0 and op_batch._mask is not None:
x = x.gate(input_tuple[0], op_batch._mask)
trace.append(x)
if terminate:
break
# The last trace entry is the program's result for this batch.
result = trace[-1] if len(trace) > 0 else None
all_results.append(result)
all_traces.append(trace)
# Merge the per-batch results into a single result structure.
result = gather_results(all_results, device, util.is_cuda(device))
if return_trace:
return result, all_traces
return result
| 49.896739 | 216 | 0.615401 |
d025b6ffd1d42bc44b635a1d7b115470cf637c7e | 11,064 | py | Python | sunshine_conversations_client/model/whatsapp_all_of.py | Dima2022/sunshine-conversations-python | 8085a82dc320d97f09bb0174d11dd1865a65404a | [
"Apache-2.0"
] | 4 | 2020-09-27T14:28:25.000Z | 2022-02-02T13:51:29.000Z | sunshine_conversations_client/model/whatsapp_all_of.py | Dima2022/sunshine-conversations-python | 8085a82dc320d97f09bb0174d11dd1865a65404a | [
"Apache-2.0"
] | 3 | 2021-09-30T18:18:58.000Z | 2021-12-04T07:55:23.000Z | sunshine_conversations_client/model/whatsapp_all_of.py | Dima2022/sunshine-conversations-python | 8085a82dc320d97f09bb0174d11dd1865a65404a | [
"Apache-2.0"
] | 5 | 2020-11-07T02:08:18.000Z | 2021-12-07T17:10:23.000Z | # coding: utf-8
"""
Sunshine Conversations API
The version of the OpenAPI document: 9.4.5
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from sunshine_conversations_client.configuration import Configuration
from sunshine_conversations_client.undefined import Undefined
class WhatsappAllOf(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Defect fixed: serialized copy had all indentation stripped (invalid
    Python); the standard openapi-generator template structure has been
    restored.
    """

    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    openapi_types = {
        'type': 'str',
        'deployment_id': 'str',
        'hsm_fallback_language': 'str',
        'account_id': 'str',
        'account_management_access_token': 'str',
        'phone_number': 'str'
    }

    attribute_map = {
        'type': 'type',
        'deployment_id': 'deploymentId',
        'hsm_fallback_language': 'hsmFallbackLanguage',
        'account_id': 'accountId',
        'account_management_access_token': 'accountManagementAccessToken',
        'phone_number': 'phoneNumber'
    }

    # NOTE(review): class-level set shared by all instances — matches the
    # generator's output for nullable-field tracking; verify intended.
    nulls = set()

    def __init__(self, type='whatsapp', deployment_id=None, hsm_fallback_language='en_US', account_id=Undefined(), account_management_access_token=Undefined(), phone_number=Undefined(), local_vars_configuration=None):  # noqa: E501
        """WhatsappAllOf - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration

        self._type = None
        self._deployment_id = None
        self._hsm_fallback_language = None
        self._account_id = None
        self._account_management_access_token = None
        self._phone_number = None
        self.discriminator = None

        if type is not None:
            self.type = type
        self.deployment_id = deployment_id
        self.hsm_fallback_language = hsm_fallback_language
        self.account_id = account_id
        self.account_management_access_token = account_management_access_token
        self.phone_number = phone_number

    @property
    def type(self):
        """Gets the type of this WhatsappAllOf.  # noqa: E501

        To configure a WhatsApp integration, use your WhatsApp API Client connection information. Sunshine Conversations can provide WhatsApp API Client hosting for approved brands. See our [WhatsApp guide](https://docs.smooch.io/guide/whatsapp/#whatsapp-api-client) for more details on WhatsApp API Client hosting.  # noqa: E501

        :return: The type of this WhatsappAllOf.  # noqa: E501
        :rtype: str
        """
        return self._type

    @type.setter
    def type(self, type):
        """Sets the type of this WhatsappAllOf.

        To configure a WhatsApp integration, use your WhatsApp API Client connection information. Sunshine Conversations can provide WhatsApp API Client hosting for approved brands. See our [WhatsApp guide](https://docs.smooch.io/guide/whatsapp/#whatsapp-api-client) for more details on WhatsApp API Client hosting.  # noqa: E501

        :param type: The type of this WhatsappAllOf.  # noqa: E501
        :type: str
        """

        self._type = type

    @property
    def deployment_id(self):
        """Gets the deployment_id of this WhatsappAllOf.  # noqa: E501

        The Id of the deployment. The integrationId and the appId will be added to the deployment. Additionally, the deployment’s status will be set to integrated.  # noqa: E501

        :return: The deployment_id of this WhatsappAllOf.  # noqa: E501
        :rtype: str
        """
        return self._deployment_id

    @deployment_id.setter
    def deployment_id(self, deployment_id):
        """Sets the deployment_id of this WhatsappAllOf.

        The Id of the deployment. The integrationId and the appId will be added to the deployment. Additionally, the deployment’s status will be set to integrated.  # noqa: E501

        :param deployment_id: The deployment_id of this WhatsappAllOf.  # noqa: E501
        :type: str
        """
        if self.local_vars_configuration.client_side_validation and deployment_id is None:  # noqa: E501
            raise ValueError("Invalid value for `deployment_id`, must not be `None`")  # noqa: E501

        self._deployment_id = deployment_id

    @property
    def hsm_fallback_language(self):
        """Gets the hsm_fallback_language of this WhatsappAllOf.  # noqa: E501

        Specify a fallback language to use when sending WhatsApp message template using the short hand syntax. Defaults to en_US. See WhatsApp documentation for more info.  # noqa: E501

        :return: The hsm_fallback_language of this WhatsappAllOf.  # noqa: E501
        :rtype: str
        """
        return self._hsm_fallback_language

    @hsm_fallback_language.setter
    def hsm_fallback_language(self, hsm_fallback_language):
        """Sets the hsm_fallback_language of this WhatsappAllOf.

        Specify a fallback language to use when sending WhatsApp message template using the short hand syntax. Defaults to en_US. See WhatsApp documentation for more info.  # noqa: E501

        :param hsm_fallback_language: The hsm_fallback_language of this WhatsappAllOf.  # noqa: E501
        :type: str
        """
        # Undefined sentinel means "field absent"; None means explicit null.
        if type(hsm_fallback_language) is Undefined:
            hsm_fallback_language = None
            self.nulls.discard("hsm_fallback_language")
        elif hsm_fallback_language is None:
            self.nulls.add("hsm_fallback_language")
        else:
            self.nulls.discard("hsm_fallback_language")

        self._hsm_fallback_language = hsm_fallback_language

    @property
    def account_id(self):
        """Gets the account_id of this WhatsappAllOf.  # noqa: E501

        The business ID associated with the WhatsApp account. In combination with accountManagementAccessToken, it’s used for Message Template Reconstruction.  # noqa: E501

        :return: The account_id of this WhatsappAllOf.  # noqa: E501
        :rtype: str
        """
        return self._account_id

    @account_id.setter
    def account_id(self, account_id):
        """Sets the account_id of this WhatsappAllOf.

        The business ID associated with the WhatsApp account. In combination with accountManagementAccessToken, it’s used for Message Template Reconstruction.  # noqa: E501

        :param account_id: The account_id of this WhatsappAllOf.  # noqa: E501
        :type: str
        """
        if type(account_id) is Undefined:
            account_id = None
            self.nulls.discard("account_id")
        elif account_id is None:
            self.nulls.add("account_id")
        else:
            self.nulls.discard("account_id")

        self._account_id = account_id

    @property
    def account_management_access_token(self):
        """Gets the account_management_access_token of this WhatsappAllOf.  # noqa: E501

        An access token associated with the accountId used to query the WhatsApp Account Management API. In combination with accountId, it’s used for Message Template Reconstruction.  # noqa: E501

        :return: The account_management_access_token of this WhatsappAllOf.  # noqa: E501
        :rtype: str
        """
        return self._account_management_access_token

    @account_management_access_token.setter
    def account_management_access_token(self, account_management_access_token):
        """Sets the account_management_access_token of this WhatsappAllOf.

        An access token associated with the accountId used to query the WhatsApp Account Management API. In combination with accountId, it’s used for Message Template Reconstruction.  # noqa: E501

        :param account_management_access_token: The account_management_access_token of this WhatsappAllOf.  # noqa: E501
        :type: str
        """
        if type(account_management_access_token) is Undefined:
            account_management_access_token = None
            self.nulls.discard("account_management_access_token")
        elif account_management_access_token is None:
            self.nulls.add("account_management_access_token")
        else:
            self.nulls.discard("account_management_access_token")

        self._account_management_access_token = account_management_access_token

    @property
    def phone_number(self):
        """Gets the phone_number of this WhatsappAllOf.  # noqa: E501

        The phone number that is associated with the deployment of this integration, if one exists.  # noqa: E501

        :return: The phone_number of this WhatsappAllOf.  # noqa: E501
        :rtype: str
        """
        return self._phone_number

    @phone_number.setter
    def phone_number(self, phone_number):
        """Sets the phone_number of this WhatsappAllOf.

        The phone number that is associated with the deployment of this integration, if one exists.  # noqa: E501

        :param phone_number: The phone_number of this WhatsappAllOf.  # noqa: E501
        :type: str
        """
        if type(phone_number) is Undefined:
            phone_number = None
            self.nulls.discard("phone_number")
        elif phone_number is None:
            self.nulls.add("phone_number")
        else:
            self.nulls.discard("phone_number")

        self._phone_number = phone_number

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, WhatsappAllOf):
            return False

        return self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, WhatsappAllOf):
            return True

        return self.to_dict() != other.to_dict()
| 38.020619 | 330 | 0.659888 |
d6ca871d5729a86e70958ec6dcacd54181b71ba0 | 1,357 | py | Python | save-bib.py | 2e0byo/bib | 9d6cd7fcf214894caa4831d948ac868b696b0a02 | [
"CC0-1.0"
] | null | null | null | save-bib.py | 2e0byo/bib | 9d6cd7fcf214894caa4831d948ac868b696b0a02 | [
"CC0-1.0"
] | null | null | null | save-bib.py | 2e0byo/bib | 9d6cd7fcf214894caa4831d948ac868b696b0a02 | [
"CC0-1.0"
] | null | null | null | import bibtexparser
from bibtexparser.bwriter import BibTexWriter
from bibtexparser.bparser import BibTexParser
from pathlib import Path
def load_uniq(fn):
    """Parse *fn* as BibTeX and keep only the first entry for each ID.

    Non-standard entry types are retained; duplicates (same ``ID``) keep
    their first occurrence, preserving original order.
    """
    with Path(fn).open() as handle:
        parser = BibTexParser()
        parser.ignore_nonstandard_types = False
        parsed = bibtexparser.load(handle, parser)
    first_seen = {}
    for entry in parsed.entries:
        first_seen.setdefault(entry["ID"], entry)
    parsed.entries = list(first_seen.values())
    return parsed
# Load every .bib file in the working directory, deduplicated per file.
bibs = {bib_path.stem: load_uniq(bib_path) for bib_path in Path(".").glob("*.bib")}


def _print_counts(collection):
    # Report entry counts per database plus the grand total.
    grand_total = 0
    for stem, database in collection.items():
        count = len(database.entries)
        print(f"{stem}: {count} entries")
        grand_total += count
    print("Total:", grand_total)


print("")
_print_counts(bibs)
print("")

# Remove from theology.bib any entry that already lives in another file.
surviving = {entry["ID"]: entry for entry in bibs["theology"].entries}
for stem, database in bibs.items():
    if stem == "theology":
        continue
    for entry in database.entries:
        surviving.pop(entry["ID"], None)
bibs["theology"].entries = list(surviving.values())

_print_counts(bibs)

# Write everything back out: author/year ordering, comma-first style.
writer = BibTexWriter()
writer.order_entries_by = ("author", "year")
writer.comma_first = True
for stem, database in bibs.items():
    with Path(f"{stem}.bib").open("w") as out:
        out.write(writer.write(database))
| 23 | 82 | 0.625645 |
56858b6cae000a8cd4e1d9cb227a02ade643b932 | 5,850 | py | Python | setup.py | moremoban/yehua | e90ac103ec28e1101fd845796c02083d52ddf43e | [
"MIT"
] | 19 | 2017-12-08T04:54:15.000Z | 2021-08-19T19:59:19.000Z | setup.py | moremoban/yehua | e90ac103ec28e1101fd845796c02083d52ddf43e | [
"MIT"
] | 55 | 2018-05-07T04:14:47.000Z | 2021-04-19T17:38:09.000Z | setup.py | chfw/yehua | e90ac103ec28e1101fd845796c02083d52ddf43e | [
"MIT"
] | 3 | 2017-07-01T14:53:57.000Z | 2017-07-23T02:25:05.000Z | #!/usr/bin/env python3
"""
Template by pypi-mobans
"""
import os
import sys
import codecs
import locale
import platform
from shutil import rmtree
from setuptools import Command, setup, find_packages
# Interpreter-version flags. NOTE(review): these appear unused in this
# file — likely kept for pypi-mobans template compatibility; confirm.
PY2 = sys.version_info[0] == 2
PY26 = PY2 and sys.version_info[1] < 7
PY33 = sys.version_info < (3, 4)
# Work around mbcs bug in distutils.
# http://bugs.python.org/issue10945
# This work around is only if a project supports Python < 3.4
# Work around for locale not being set
try:
    lc = locale.getlocale()
    pf = platform.system()
    if pf != "Windows" and lc == (None, None):
        locale.setlocale(locale.LC_ALL, "C.UTF-8")
except (ValueError, UnicodeError, locale.Error):
    locale.setlocale(locale.LC_ALL, "en_US.UTF-8")
# --- Package metadata consumed by setup() at the bottom of the file ---
NAME = "yehua"
AUTHOR = "C. W."
VERSION = "0.1.4"
EMAIL = "wangc_2011@hotmail.com"
LICENSE = "New BSD"
ENTRY_POINTS = {
    "console_scripts": [
        "yh = yehua.main:main"
    ],
}
# NOTE(review): "another a project" reads as a typo, but this string is
# published package metadata — change deliberately, not cosmetically.
DESCRIPTION = (
    "Yet another a project template tool for an organisation."
)
URL = "https://github.com/moremoban/yehua"
# DOWNLOAD_URL embeds the release tag; keep in sync with VERSION above.
DOWNLOAD_URL = "%s/archive/0.1.4.tar.gz" % URL
FILES = ["README.rst","CONTRIBUTORS.rst", "CHANGELOG.rst"]
KEYWORDS = [
    "python",
]
CLASSIFIERS = [
    "Topic :: Software Development :: Libraries",
    "Programming Language :: Python",
    "Intended Audience :: Developers",
    "Programming Language :: Python :: 3 :: Only",
    "Programming Language :: Python :: 3.6",
    "Programming Language :: Python :: 3.7",
    "Programming Language :: Python :: 3.8",
    'Topic :: Utilities',
]
PYTHON_REQUIRES = ">=3.6"
INSTALL_REQUIRES = [
    "Jinja2",
    "moban>=0.6.0",
    "colorful",
    "rich",
    "readchar",
    "colorama",
    "moban-jinja2-github>=0.0.2",
    "moban-ansible",
]
# Extra distutils commands; PublishCommand is registered below.
SETUP_COMMANDS = {}
PACKAGES = find_packages(exclude=["ez_setup", "examples", "tests", "tests.*"])
EXTRAS_REQUIRE = {
    "pypi-mobans": ['pypi-mobans-pkg>=0.1.4'],
    "cookiecutter": ['cookiecutter==1.7.0'],
    ":python_version == '3.7'": ["ruamel.yaml>=0.15.42"],
    ":python_version != '3.4' and python_version < '3.7'": ["ruamel.yaml>=0.15.5"],
    ":python_version == '3.8'": ["ruamel.yaml>=0.15.98"],
}
# You do not need to read beyond this line
PUBLISH_COMMAND = "{0} setup.py sdist bdist_wheel upload -r pypi".format(sys.executable)
HERE = os.path.abspath(os.path.dirname(__file__))
GS_COMMAND = ("gs yehua v0.1.4 " +
              "Find 0.1.4 in changelog for more details")
NO_GS_MESSAGE = ("Automatic github release is disabled. " +
                 "Please install gease to enable it.")
UPLOAD_FAILED_MSG = (
    'Upload failed. please run "%s" yourself.' % PUBLISH_COMMAND)
class PublishCommand(Command):
    """Support ``setup.py publish``: build and upload the package.

    Creates a github release via gease when available, then runs the
    sdist/bdist_wheel upload command.
    """

    description = "Build and publish the package on github and pypi"
    user_options = []

    @staticmethod
    def status(s):
        """Prints things in bold."""
        print("\033[1m{0}\033[0m".format(s))

    def initialize_options(self):
        # Required by the distutils Command protocol; no options to set up.
        pass

    def finalize_options(self):
        # Required by the distutils Command protocol; nothing to validate.
        pass

    def run(self):
        self.status("Removing previous builds...")
        # Fix: remove each stale directory independently. Previously all
        # three rmtree calls shared one try/except, so a missing "dist"
        # left "build" and the egg-info directory behind.
        for stale in ("dist", "build", "yehua.egg-info"):
            try:
                rmtree(os.path.join(HERE, stale))
            except OSError:
                pass

        self.status("Building Source and Wheel (universal) distribution...")
        run_status = True
        if has_gease():
            # Gate the pypi upload on a successful github release.
            run_status = os.system(GS_COMMAND) == 0
        else:
            self.status(NO_GS_MESSAGE)
        if run_status:
            if os.system(PUBLISH_COMMAND) != 0:
                self.status(UPLOAD_FAILED_MSG)
        sys.exit()
# Register the custom publish command with setuptools (see cmdclass below).
SETUP_COMMANDS["publish"] = PublishCommand
def has_gease():
    """
    test if github release command is installed
    visit http://github.com/moremoban/gease for more info
    """
    try:
        import gease  # noqa
    except ImportError:
        return False
    return True
def read_files(*files):
    """Read files into setup, concatenated with a newline after each."""
    return "".join(read(name) + "\n" for name in files)
def read(afile):
    """Read one file (relative to this setup.py) into setup, with
    Sphinx test code and substitution markers stripped."""
    full_path = os.path.join(HERE, afile)
    with codecs.open(full_path, "r", "utf-8") as handle:
        return "".join(filter_out_test_code(handle))
def filter_out_test_code(file_handle):
    """Yield the lines of *file_handle*, dropping Sphinx test blocks.

    A ``.. testcode:`` directive and its body (lines indented by at least
    two spaces, plus blank lines) are skipped. Outside such blocks, any
    line containing the ``|version|`` or ``|today|`` substitution markers
    is dropped as well.
    """
    inside_testcode = False
    for line in file_handle.readlines():
        if line.startswith(".. testcode:"):
            inside_testcode = True
            continue
        if inside_testcode:
            # Indented or blank lines belong to the testcode body.
            if line.startswith("  ") or not line.strip():
                continue
            # First real line after the block ends it; it is emitted as-is
            # (mirroring the original: no substitution filtering here).
            inside_testcode = False
            yield line
            continue
        if "|version|" in line or "|today|" in line:
            continue
        yield line
if __name__ == "__main__":
setup(
test_suite="tests",
name=NAME,
author=AUTHOR,
version=VERSION,
author_email=EMAIL,
description=DESCRIPTION,
url=URL,
download_url=DOWNLOAD_URL,
long_description=read_files(*FILES),
license=LICENSE,
keywords=KEYWORDS,
python_requires=PYTHON_REQUIRES,
extras_require=EXTRAS_REQUIRE,
tests_require=["nose"],
install_requires=INSTALL_REQUIRES,
packages=PACKAGES,
include_package_data=True,
zip_safe=False,
entry_points=ENTRY_POINTS,
classifiers=CLASSIFIERS,
cmdclass=SETUP_COMMANDS
)
| 26 | 88 | 0.60188 |
92a0cb8347f1ee1a202a8aeb4c0488d3280ad783 | 8,412 | py | Python | venv/Lib/site-packages/prompt_toolkit/styles/defaults.py | mirosa25/ITI-202-Final-Project | b46e7ffccf43a52ee0e72b889c5aac6887228dc2 | [
"MIT"
] | 1,318 | 2019-07-11T10:34:39.000Z | 2022-03-29T15:05:19.000Z | venv/Lib/site-packages/prompt_toolkit/styles/defaults.py | mirosa25/ITI-202-Final-Project | b46e7ffccf43a52ee0e72b889c5aac6887228dc2 | [
"MIT"
] | 387 | 2019-09-05T16:33:09.000Z | 2022-03-31T10:43:39.000Z | venv/Lib/site-packages/prompt_toolkit/styles/defaults.py | mirosa25/ITI-202-Final-Project | b46e7ffccf43a52ee0e72b889c5aac6887228dc2 | [
"MIT"
] | 66 | 2019-11-11T15:33:12.000Z | 2022-03-01T07:55:55.000Z | """
The default styling.
"""
from prompt_toolkit.cache import memoized
from .base import ANSI_COLOR_NAMES
from .named_colors import NAMED_COLORS
from .style import BaseStyle, Style, merge_styles
__all__ = [
"default_ui_style",
"default_pygments_style",
]
#: Default styling. Mapping from classnames to their style definition.
#: Each entry is a (style class name, style string) pair.
PROMPT_TOOLKIT_STYLE = [
    # Highlighting of search matches in document.
    ("search", "bg:ansibrightyellow ansiblack"),
    ("search.current", ""),
    # Incremental search.
    ("incsearch", ""),
    ("incsearch.current", "reverse"),
    # Highlighting of select text in document.
    ("selected", "reverse"),
    ("cursor-column", "bg:#dddddd"),
    ("cursor-line", "underline"),
    ("color-column", "bg:#ccaacc"),
    # Highlighting of matching brackets.
    ("matching-bracket", ""),
    ("matching-bracket.other", "#000000 bg:#aacccc"),
    ("matching-bracket.cursor", "#ff8888 bg:#880000"),
    # Styling of other cursors, in case of block editing.
    ("multiple-cursors", "#000000 bg:#ccccaa"),
    # Line numbers.
    ("line-number", "#888888"),
    ("line-number.current", "bold"),
    ("tilde", "#8888ff"),
    # Default prompt.
    ("prompt", ""),
    ("prompt.arg", "noinherit"),
    ("prompt.arg.text", ""),
    ("prompt.search", "noinherit"),
    ("prompt.search.text", ""),
    # Search toolbar.
    ("search-toolbar", "bold"),
    ("search-toolbar.text", "nobold"),
    # System toolbar
    ("system-toolbar", "bold"),
    ("system-toolbar.text", "nobold"),
    # "arg" toolbar.
    ("arg-toolbar", "bold"),
    ("arg-toolbar.text", "nobold"),
    # Validation toolbar.
    ("validation-toolbar", "bg:#550000 #ffffff"),
    ("window-too-small", "bg:#550000 #ffffff"),
    # Completions toolbar.
    ("completion-toolbar", "bg:#bbbbbb #000000"),
    ("completion-toolbar.arrow", "bg:#bbbbbb #000000 bold"),
    ("completion-toolbar.completion", "bg:#bbbbbb #000000"),
    ("completion-toolbar.completion.current", "bg:#444444 #ffffff"),
    # Completions menu.
    ("completion-menu", "bg:#bbbbbb #000000"),
    ("completion-menu.completion", ""),
    ("completion-menu.completion.current", "bg:#888888 #ffffff"),
    ("completion-menu.meta.completion", "bg:#999999 #000000"),
    ("completion-menu.meta.completion.current", "bg:#aaaaaa #000000"),
    ("completion-menu.multi-column-meta", "bg:#aaaaaa #000000"),
    # Fuzzy matches in completion menu (for FuzzyCompleter).
    ("completion-menu.completion fuzzymatch.outside", "fg:#444444"),
    ("completion-menu.completion fuzzymatch.inside", "bold"),
    ("completion-menu.completion fuzzymatch.inside.character", "underline"),
    ("completion-menu.completion.current fuzzymatch.outside", "fg:default"),
    ("completion-menu.completion.current fuzzymatch.inside", "nobold"),
    # Styling of readline-like completions.
    ("readline-like-completions", ""),
    ("readline-like-completions.completion", ""),
    ("readline-like-completions.completion fuzzymatch.outside", "#888888"),
    ("readline-like-completions.completion fuzzymatch.inside", ""),
    ("readline-like-completions.completion fuzzymatch.inside.character", "underline"),
    # Scrollbars.
    ("scrollbar.background", "bg:#aaaaaa"),
    ("scrollbar.button", "bg:#444444"),
    ("scrollbar.arrow", "noinherit bold"),
    # Start/end of scrollbars. Adding 'underline' here provides a nice little
    # detail to the progress bar, but it doesn't look good on all terminals.
    # ('scrollbar.start', 'underline #ffffff'),
    # ('scrollbar.end', 'underline #000000'),
    # Auto suggestion text.
    ("auto-suggestion", "#666666"),
    # Trailing whitespace and tabs.
    ("trailing-whitespace", "#999999"),
    ("tab", "#999999"),
    # When Control-C/D has been pressed. Grayed.
    ("aborting", "#888888 bg:default noreverse noitalic nounderline noblink"),
    ("exiting", "#888888 bg:default noreverse noitalic nounderline noblink"),
    # Entering a Vi digraph.
    ("digraph", "#4444ff"),
    # Control characters, like ^C, ^X.
    ("control-character", "ansiblue"),
    # Non-breaking space.
    ("nbsp", "underline ansiyellow"),
    # Default styling of HTML elements.
    ("i", "italic"),
    ("u", "underline"),
    ("b", "bold"),
    ("em", "italic"),
    ("strong", "bold"),
    ("hidden", "hidden"),
    # It should be possible to use the style names in HTML.
    # <reverse>...</reverse> or <noreverse>...</noreverse>.
    ("italic", "italic"),
    ("underline", "underline"),
    ("bold", "bold"),
    ("reverse", "reverse"),
    ("noitalic", "noitalic"),
    ("nounderline", "nounderline"),
    ("nobold", "nobold"),
    ("noreverse", "noreverse"),
    # Prompt bottom toolbar
    ("bottom-toolbar", "reverse"),
]
# Style that will turn for instance the class 'red' into 'red'.
# ANSI color names are kept verbatim; CSS-style named colors get a
# lowercased class name mapping to the original color name.
COLORS_STYLE = [
    *((color, "fg:" + color) for color in ANSI_COLOR_NAMES),
    *((color.lower(), "fg:" + color) for color in NAMED_COLORS),
]
# Default styling for the widget toolkit (dialogs, menus, buttons, ...).
WIDGETS_STYLE = [
    # Dialog windows.
    ("dialog", "bg:#4444ff"),
    ("dialog.body", "bg:#ffffff #000000"),
    ("dialog.body text-area", "bg:#cccccc"),
    ("dialog.body text-area last-line", "underline"),
    ("dialog frame.label", "#ff0000 bold"),
    # Scrollbars in dialogs.
    ("dialog.body scrollbar.background", ""),
    ("dialog.body scrollbar.button", "bg:#000000"),
    ("dialog.body scrollbar.arrow", ""),
    ("dialog.body scrollbar.start", "nounderline"),
    ("dialog.body scrollbar.end", "nounderline"),
    # Buttons.
    ("button", ""),
    ("button.arrow", "bold"),
    ("button.focused", "bg:#aa0000 #ffffff"),
    # Menu bars.
    ("menu-bar", "bg:#aaaaaa #000000"),
    ("menu-bar.selected-item", "bg:#ffffff #000000"),
    ("menu", "bg:#888888 #ffffff"),
    ("menu.border", "#aaaaaa"),
    ("menu.border shadow", "#444444"),
    # Shadows.
    ("dialog shadow", "bg:#000088"),
    ("dialog.body shadow", "bg:#aaaaaa"),
    ("progress-bar", "bg:#000088"),
    ("progress-bar.used", "bg:#ff0000"),
]
# The default Pygments style, include this by default in case a Pygments lexer
# is used.
# Maps "pygments.<token path>" class names to style strings.
PYGMENTS_DEFAULT_STYLE = {
    "pygments.whitespace": "#bbbbbb",
    "pygments.comment": "italic #408080",
    "pygments.comment.preproc": "noitalic #bc7a00",
    "pygments.keyword": "bold #008000",
    "pygments.keyword.pseudo": "nobold",
    "pygments.keyword.type": "nobold #b00040",
    "pygments.operator": "#666666",
    "pygments.operator.word": "bold #aa22ff",
    "pygments.name.builtin": "#008000",
    "pygments.name.function": "#0000ff",
    "pygments.name.class": "bold #0000ff",
    "pygments.name.namespace": "bold #0000ff",
    "pygments.name.exception": "bold #d2413a",
    "pygments.name.variable": "#19177c",
    "pygments.name.constant": "#880000",
    "pygments.name.label": "#a0a000",
    "pygments.name.entity": "bold #999999",
    "pygments.name.attribute": "#7d9029",
    "pygments.name.tag": "bold #008000",
    "pygments.name.decorator": "#aa22ff",
    # Note: In Pygments, Token.String is an alias for Token.Literal.String,
    # and Token.Number as an alias for Token.Literal.Number.
    "pygments.literal.string": "#ba2121",
    "pygments.literal.string.doc": "italic",
    "pygments.literal.string.interpol": "bold #bb6688",
    "pygments.literal.string.escape": "bold #bb6622",
    "pygments.literal.string.regex": "#bb6688",
    "pygments.literal.string.symbol": "#19177c",
    "pygments.literal.string.other": "#008000",
    "pygments.literal.number": "#666666",
    "pygments.generic.heading": "bold #000080",
    "pygments.generic.subheading": "bold #800080",
    "pygments.generic.deleted": "#a00000",
    "pygments.generic.inserted": "#00a000",
    "pygments.generic.error": "#ff0000",
    "pygments.generic.emph": "italic",
    "pygments.generic.strong": "bold",
    "pygments.generic.prompt": "bold #000080",
    "pygments.generic.output": "#888",
    "pygments.generic.traceback": "#04d",
    "pygments.error": "border:#ff0000",
}
@memoized()
def default_ui_style() -> BaseStyle:
    """
    Create a default `Style` object.

    Merges the core prompt_toolkit rules, color-name classes and widget
    styling; memoized so callers share one instance.
    """
    layers = [
        Style(PROMPT_TOOLKIT_STYLE),
        Style(COLORS_STYLE),
        Style(WIDGETS_STYLE),
    ]
    return merge_styles(layers)
@memoized()
def default_pygments_style() -> Style:
    """
    Create a `Style` object that contains the default Pygments style.

    Memoized so the dict is only converted once.
    """
    pygments_style = Style.from_dict(PYGMENTS_DEFAULT_STYLE)
    return pygments_style
| 36.894737 | 86 | 0.628507 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.